hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dbad9faa45e58453d702ba0348f28704c2f33433
| 97,965
|
py
|
Python
|
h1/api/iam_organisation_role_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
h1/api/iam_organisation_role_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
h1/api/iam_organisation_role_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
"""
HyperOne
HyperOne API # noqa: E501
The version of the OpenAPI document: 0.1.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from h1.api_client import ApiClient, Endpoint as _Endpoint
from h1.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from h1.model.event import Event
from h1.model.iam_permission import IamPermission
from h1.model.iam_permission_array import IamPermissionArray
from h1.model.iam_project_role_create import IamProjectRoleCreate
from h1.model.iam_project_role_update import IamProjectRoleUpdate
from h1.model.inline_response400 import InlineResponse400
from h1.model.resource_service import ResourceService
from h1.model.role import Role
from h1.model.tag import Tag
from h1.model.tag_array import TagArray
class IamOrganisationRoleApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __iam_organisation_role_create(
    self,
    organisation_id,
    iam_project_role_create,
    **kwargs
):
    """Create iam/role # noqa: E501

    Create role # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.iam_organisation_role_create(organisation_id, iam_project_role_create, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        iam_project_role_create (IamProjectRoleCreate):

    Keyword Args:
        x_idempotency_key (str): Idempotency key. [optional]
        x_dry_run (str): Dry run. [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done one the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done one the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        Role
            If the method is called asynchronously, returns the request
            thread.
    """
    # Fill in defaults for the standard per-request options so the shared
    # endpoint machinery always receives a complete option set; values the
    # caller passed explicitly take precedence over these defaults.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Required positional arguments are forwarded through kwargs as well.
    kwargs['organisation_id'] = \
        organisation_id
    kwargs['iam_project_role_create'] = \
        iam_project_role_create
    # NOTE(review): this function is invoked via the _Endpoint bound below,
    # so `self` here appears to be the _Endpoint instance (the provider of
    # call_with_http_info) — confirm against h1.api_client.
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: binds the OpenAPI operation metadata (path, HTTP
# method, parameter types/locations, auth) to the callable above.
self.iam_organisation_role_create = _Endpoint(
    settings={
        'response_type': (Role,),
        'auth': [
            'BearerAuth'
        ],
        'endpoint_path': '/iam/organisation/{organisationId}/role',
        'operation_id': 'iam_organisation_role_create',
        'http_method': 'POST',
        'servers': None,
    },
    # Parameter bookkeeping: which parameters exist, which are required,
    # and which participate in enum/validation checks.
    params_map={
        'all': [
            'organisation_id',
            'iam_project_role_create',
            'x_idempotency_key',
            'x_dry_run',
        ],
        'required': [
            'organisation_id',
            'iam_project_role_create',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'organisation_id':
                (str,),
            'iam_project_role_create':
                (IamProjectRoleCreate,),
            'x_idempotency_key':
                (str,),
            'x_dry_run':
                (str,),
        },
        # Maps python parameter names to their serialized (wire) names.
        'attribute_map': {
            'organisation_id': 'organisationId',
            'x_idempotency_key': 'x-idempotency-key',
            'x_dry_run': 'x-dry-run',
        },
        # Where each parameter is placed in the outgoing request.
        'location_map': {
            'organisation_id': 'path',
            'iam_project_role_create': 'body',
            'x_idempotency_key': 'header',
            'x_dry_run': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__iam_organisation_role_create
)
def __iam_organisation_role_delete(
    self,
    organisation_id,
    role_id,
    **kwargs
):
    """Delete iam/role # noqa: E501

    Delete role # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.iam_organisation_role_delete(organisation_id, role_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done one the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done one the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        None
            If the method is called asynchronously, returns the request
            thread.
    """
    # Apply defaults for the standard per-request options; explicit caller
    # values win over these defaults.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Forward the required positional arguments through kwargs.
    kwargs['organisation_id'] = \
        organisation_id
    kwargs['role_id'] = \
        role_id
    # NOTE(review): invoked via the _Endpoint bound below, so `self` appears
    # to be the _Endpoint instance — confirm against h1.api_client.
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: wires the DELETE operation metadata to the callable
# above. 'response_type': None means no body is deserialized on success.
self.iam_organisation_role_delete = _Endpoint(
    settings={
        'response_type': None,
        'auth': [
            'BearerAuth'
        ],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}',
        'operation_id': 'iam_organisation_role_delete',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': [
            'organisation_id',
            'role_id',
        ],
        'required': [
            'organisation_id',
            'role_id',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'organisation_id':
                (str,),
            'role_id':
                (str,),
        },
        # Python parameter name -> serialized (wire) name.
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
        },
        # Both parameters are substituted into the URL path.
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_delete
)
def __iam_organisation_role_event_get(
    self,
    organisation_id,
    role_id,
    event_id,
    **kwargs
):
    """Get iam/role.event # noqa: E501

    Get iam/role.event # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.iam_organisation_role_event_get(organisation_id, role_id, event_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        event_id (str): eventId

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done one the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done one the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        Event
            If the method is called asynchronously, returns the request
            thread.
    """
    # Apply defaults for the standard per-request options; explicit caller
    # values win over these defaults.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Forward the required positional arguments through kwargs.
    kwargs['organisation_id'] = \
        organisation_id
    kwargs['role_id'] = \
        role_id
    kwargs['event_id'] = \
        event_id
    # NOTE(review): invoked via the _Endpoint bound below, so `self` appears
    # to be the _Endpoint instance — confirm against h1.api_client.
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: wires the GET operation metadata for a single role
# event to the callable above.
self.iam_organisation_role_event_get = _Endpoint(
    settings={
        'response_type': (Event,),
        'auth': [
            'BearerAuth'
        ],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/event/{eventId}',
        'operation_id': 'iam_organisation_role_event_get',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'organisation_id',
            'role_id',
            'event_id',
        ],
        'required': [
            'organisation_id',
            'role_id',
            'event_id',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'organisation_id':
                (str,),
            'role_id':
                (str,),
            'event_id':
                (str,),
        },
        # Python parameter name -> serialized (wire) name.
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
            'event_id': 'eventId',
        },
        # All three parameters are substituted into the URL path.
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'event_id': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_event_get
)
def __iam_organisation_role_event_list(
    self,
    organisation_id,
    role_id,
    **kwargs
):
    """List iam/role.event # noqa: E501

    List iam/role.event # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.iam_organisation_role_event_list(organisation_id, role_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id

    Keyword Args:
        limit (float): $limit. [optional] if omitted the server will use the default value of 100
        skip (float): $skip. [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done one the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done one the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [Event]
            If the method is called asynchronously, returns the request
            thread.
    """
    # Apply defaults for the standard per-request options; explicit caller
    # values win over these defaults.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Forward the required positional arguments through kwargs.
    kwargs['organisation_id'] = \
        organisation_id
    kwargs['role_id'] = \
        role_id
    # NOTE(review): invoked via the _Endpoint bound below, so `self` appears
    # to be the _Endpoint instance — confirm against h1.api_client.
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: wires the GET list operation metadata to the callable
# above. 'limit' is range-validated client-side (see 'validations').
self.iam_organisation_role_event_list = _Endpoint(
    settings={
        'response_type': ([Event],),
        'auth': [
            'BearerAuth'
        ],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/event',
        'operation_id': 'iam_organisation_role_event_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'organisation_id',
            'role_id',
            'limit',
            'skip',
        ],
        'required': [
            'organisation_id',
            'role_id',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
            'limit',
        ]
    },
    root_map={
        # Client-side bound check: 1 <= limit <= 1000.
        'validations': {
            ('limit',): {
                'inclusive_maximum': 1000,
                'inclusive_minimum': 1,
            },
        },
        'allowed_values': {
        },
        'openapi_types': {
            'organisation_id':
                (str,),
            'role_id':
                (str,),
            'limit':
                (float,),
            'skip':
                (float,),
        },
        # Python parameter name -> serialized (wire) name; the pagination
        # query parameters use '$'-prefixed wire names.
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
            'limit': '$limit',
            'skip': '$skip',
        },
        # Path ids in the URL; pagination values go in the query string.
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'limit': 'query',
            'skip': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_event_list
)
def __iam_organisation_role_get(
    self,
    organisation_id,
    role_id,
    **kwargs
):
    """Get iam/role # noqa: E501

    Returns a single role # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.iam_organisation_role_get(organisation_id, role_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done one the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done one the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        Role
            If the method is called asynchronously, returns the request
            thread.
    """
    # Apply defaults for the standard per-request options; explicit caller
    # values win over these defaults.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Forward the required positional arguments through kwargs.
    kwargs['organisation_id'] = \
        organisation_id
    kwargs['role_id'] = \
        role_id
    # NOTE(review): invoked via the _Endpoint bound below, so `self` appears
    # to be the _Endpoint instance — confirm against h1.api_client.
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: wires the single-role GET operation metadata to the
# callable above.
self.iam_organisation_role_get = _Endpoint(
    settings={
        'response_type': (Role,),
        'auth': [
            'BearerAuth'
        ],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}',
        'operation_id': 'iam_organisation_role_get',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'organisation_id',
            'role_id',
        ],
        'required': [
            'organisation_id',
            'role_id',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'organisation_id':
                (str,),
            'role_id':
                (str,),
        },
        # Python parameter name -> serialized (wire) name.
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
        },
        # Both parameters are substituted into the URL path.
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_get
)
def __iam_organisation_role_list(
    self,
    organisation_id,
    **kwargs
):
    """List iam/role # noqa: E501

    List role # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.iam_organisation_role_list(organisation_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id

    Keyword Args:
        name (str): Filter by name. [optional]
        tag_value (str): Filter by tag.value. [optional]
        tag_key (str): Filter by tag.key. [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done one the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done one the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [Role]
            If the method is called asynchronously, returns the request
            thread.
    """
    # Apply defaults for the standard per-request options; explicit caller
    # values win over these defaults.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Forward the required positional argument through kwargs.
    kwargs['organisation_id'] = \
        organisation_id
    # NOTE(review): invoked via the _Endpoint bound below, so `self` appears
    # to be the _Endpoint instance — confirm against h1.api_client.
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: wires the role-list GET operation metadata to the
# callable above; optional filters are sent as query parameters.
self.iam_organisation_role_list = _Endpoint(
    settings={
        'response_type': ([Role],),
        'auth': [
            'BearerAuth'
        ],
        'endpoint_path': '/iam/organisation/{organisationId}/role',
        'operation_id': 'iam_organisation_role_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'organisation_id',
            'name',
            'tag_value',
            'tag_key',
        ],
        'required': [
            'organisation_id',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'organisation_id':
                (str,),
            'name':
                (str,),
            'tag_value':
                (str,),
            'tag_key':
                (str,),
        },
        # Python parameter name -> serialized (wire) name; the tag filters
        # use dotted wire names ('tag.value', 'tag.key').
        'attribute_map': {
            'organisation_id': 'organisationId',
            'name': 'name',
            'tag_value': 'tag.value',
            'tag_key': 'tag.key',
        },
        # Path id in the URL; all filters go in the query string.
        'location_map': {
            'organisation_id': 'path',
            'name': 'query',
            'tag_value': 'query',
            'tag_key': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_list
)
def __iam_organisation_role_permission_create(
    self,
    organisation_id,
    role_id,
    iam_permission,
    **kwargs
):
    """Create iam/role.permission # noqa: E501

    Create iam/role.permission # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.iam_organisation_role_permission_create(organisation_id, role_id, iam_permission, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        iam_permission (IamPermission):

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done one the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done one the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        IamPermission
            If the method is called asynchronously, returns the request
            thread.
    """
    # Apply defaults for the standard per-request options; explicit caller
    # values win over these defaults.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Forward the required positional arguments through kwargs.
    kwargs['organisation_id'] = \
        organisation_id
    kwargs['role_id'] = \
        role_id
    kwargs['iam_permission'] = \
        iam_permission
    # NOTE(review): invoked via the _Endpoint bound below, so `self` appears
    # to be the _Endpoint instance — confirm against h1.api_client.
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: wires the POST operation metadata to the callable
# above; the IamPermission payload is sent as the JSON request body.
self.iam_organisation_role_permission_create = _Endpoint(
    settings={
        'response_type': (IamPermission,),
        'auth': [
            'BearerAuth'
        ],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/permission',
        'operation_id': 'iam_organisation_role_permission_create',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'organisation_id',
            'role_id',
            'iam_permission',
        ],
        'required': [
            'organisation_id',
            'role_id',
            'iam_permission',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'organisation_id':
                (str,),
            'role_id':
                (str,),
            'iam_permission':
                (IamPermission,),
        },
        # Python parameter name -> serialized (wire) name; the body
        # parameter needs no wire-name mapping.
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
        },
        # Path ids in the URL; the permission object travels in the body.
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'iam_permission': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__iam_organisation_role_permission_create
)
def __iam_organisation_role_permission_delete(
    self,
    organisation_id,
    role_id,
    permission_id,
    **kwargs
):
    """Delete iam/role.permission # noqa: E501

    Delete iam/role.permission # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.iam_organisation_role_permission_delete(organisation_id, role_id, permission_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        permission_id (str): permissionId

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done one the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done one the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        IamPermission
            If the method is called asynchronously, returns the request
            thread.
    """
    # Apply defaults for the standard per-request options; explicit caller
    # values win over these defaults.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Forward the required positional arguments through kwargs.
    kwargs['organisation_id'] = \
        organisation_id
    kwargs['role_id'] = \
        role_id
    kwargs['permission_id'] = \
        permission_id
    # NOTE(review): invoked via the _Endpoint bound below, so `self` appears
    # to be the _Endpoint instance — confirm against h1.api_client.
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: wires the DELETE operation metadata to the callable
# above; note it deserializes and returns the removed IamPermission.
self.iam_organisation_role_permission_delete = _Endpoint(
    settings={
        'response_type': (IamPermission,),
        'auth': [
            'BearerAuth'
        ],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/permission/{permissionId}',
        'operation_id': 'iam_organisation_role_permission_delete',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': [
            'organisation_id',
            'role_id',
            'permission_id',
        ],
        'required': [
            'organisation_id',
            'role_id',
            'permission_id',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'organisation_id':
                (str,),
            'role_id':
                (str,),
            'permission_id':
                (str,),
        },
        # Python parameter name -> serialized (wire) name.
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
            'permission_id': 'permissionId',
        },
        # All three parameters are substituted into the URL path.
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'permission_id': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_permission_delete
)
def __iam_organisation_role_permission_get(
    self,
    organisation_id,
    role_id,
    permission_id,
    **kwargs
):
    """Get iam/role.permission  # noqa: E501

    Synchronous by default; pass async_req=True to make an
    asynchronous HTTP request.

    >>> thread = api.iam_organisation_role_permission_get(organisation_id, role_id, permission_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        permission_id (str): permissionId

    Keyword Args:
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        IamPermission. If called asynchronously, returns the request
        thread instead.
    """
    # Fill in the framework's control kwargs without clobbering any
    # caller-supplied values (same net effect as assigning kwargs.get()).
    for _key, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(_key, _default)
    # Path parameters travel through kwargs to the endpoint machinery.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    kwargs['permission_id'] = permission_id
    return self.call_with_http_info(**kwargs)
self.iam_organisation_role_permission_get = _Endpoint(
    settings={
        'response_type': (IamPermission,),
        'auth': ['BearerAuth'],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/permission/{permissionId}',
        'operation_id': 'iam_organisation_role_permission_get',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['organisation_id', 'role_id', 'permission_id'],
        'required': ['organisation_id', 'role_id', 'permission_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'organisation_id': (str,),
            'role_id': (str,),
            'permission_id': (str,),
        },
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
            'permission_id': 'permissionId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'permission_id': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_permission_get
)
def __iam_organisation_role_permission_list(
    self,
    organisation_id,
    role_id,
    **kwargs
):
    """List iam/role.permission  # noqa: E501

    Synchronous by default; pass async_req=True to make an
    asynchronous HTTP request.

    >>> thread = api.iam_organisation_role_permission_list(organisation_id, role_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id

    Keyword Args:
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [IamPermission]. If called asynchronously, returns the request
        thread instead.
    """
    # Fill in the framework's control kwargs without clobbering any
    # caller-supplied values.
    for _key, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(_key, _default)
    # Path parameters travel through kwargs to the endpoint machinery.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    return self.call_with_http_info(**kwargs)
self.iam_organisation_role_permission_list = _Endpoint(
    settings={
        'response_type': ([IamPermission],),
        'auth': ['BearerAuth'],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/permission',
        'operation_id': 'iam_organisation_role_permission_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['organisation_id', 'role_id'],
        'required': ['organisation_id', 'role_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'organisation_id': (str,),
            'role_id': (str,),
        },
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_permission_list
)
def __iam_organisation_role_permission_put(
    self,
    organisation_id,
    role_id,
    iam_permission_array,
    **kwargs
):
    """Replace iam/role.permission  # noqa: E501

    Synchronous by default; pass async_req=True to make an
    asynchronous HTTP request.

    >>> thread = api.iam_organisation_role_permission_put(organisation_id, role_id, iam_permission_array, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        iam_permission_array (IamPermissionArray): request body

    Keyword Args:
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [IamPermission]. If called asynchronously, returns the request
        thread instead.
    """
    # Fill in the framework's control kwargs without clobbering any
    # caller-supplied values.
    for _key, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(_key, _default)
    # Path and body parameters travel through kwargs to the endpoint.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    kwargs['iam_permission_array'] = iam_permission_array
    return self.call_with_http_info(**kwargs)
self.iam_organisation_role_permission_put = _Endpoint(
    settings={
        'response_type': ([IamPermission],),
        'auth': ['BearerAuth'],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/permission',
        'operation_id': 'iam_organisation_role_permission_put',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': ['organisation_id', 'role_id', 'iam_permission_array'],
        'required': ['organisation_id', 'role_id', 'iam_permission_array'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'organisation_id': (str,),
            'role_id': (str,),
            'iam_permission_array': (IamPermissionArray,),
        },
        # The body parameter has no attribute-name mapping.
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'iam_permission_array': 'body',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': ['application/json'],
    },
    api_client=api_client,
    callable=__iam_organisation_role_permission_put
)
def __iam_organisation_role_service_get(
    self,
    organisation_id,
    role_id,
    service_id,
    **kwargs
):
    """Get iam/role.service  # noqa: E501

    Synchronous by default; pass async_req=True to make an
    asynchronous HTTP request.

    >>> thread = api.iam_organisation_role_service_get(organisation_id, role_id, service_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        service_id (str): serviceId

    Keyword Args:
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        ResourceService. If called asynchronously, returns the request
        thread instead.
    """
    # Fill in the framework's control kwargs without clobbering any
    # caller-supplied values.
    for _key, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(_key, _default)
    # Path parameters travel through kwargs to the endpoint machinery.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    kwargs['service_id'] = service_id
    return self.call_with_http_info(**kwargs)
self.iam_organisation_role_service_get = _Endpoint(
    settings={
        'response_type': (ResourceService,),
        'auth': ['BearerAuth'],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/service/{serviceId}',
        'operation_id': 'iam_organisation_role_service_get',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['organisation_id', 'role_id', 'service_id'],
        'required': ['organisation_id', 'role_id', 'service_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'organisation_id': (str,),
            'role_id': (str,),
            'service_id': (str,),
        },
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
            'service_id': 'serviceId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'service_id': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_service_get
)
def __iam_organisation_role_service_list(
    self,
    organisation_id,
    role_id,
    **kwargs
):
    """List iam/role.service  # noqa: E501

    Synchronous by default; pass async_req=True to make an
    asynchronous HTTP request.

    >>> thread = api.iam_organisation_role_service_list(organisation_id, role_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id

    Keyword Args:
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [ResourceService]. If called asynchronously, returns the request
        thread instead.
    """
    # Fill in the framework's control kwargs without clobbering any
    # caller-supplied values.
    for _key, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(_key, _default)
    # Path parameters travel through kwargs to the endpoint machinery.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    return self.call_with_http_info(**kwargs)
self.iam_organisation_role_service_list = _Endpoint(
    settings={
        'response_type': ([ResourceService],),
        'auth': ['BearerAuth'],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/service',
        'operation_id': 'iam_organisation_role_service_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['organisation_id', 'role_id'],
        'required': ['organisation_id', 'role_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'organisation_id': (str,),
            'role_id': (str,),
        },
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_service_list
)
def __iam_organisation_role_tag_create(
    self,
    organisation_id,
    role_id,
    tag,
    **kwargs
):
    """Create iam/role.tag  # noqa: E501

    Synchronous by default; pass async_req=True to make an
    asynchronous HTTP request.

    >>> thread = api.iam_organisation_role_tag_create(organisation_id, role_id, tag, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        tag (Tag): request body

    Keyword Args:
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        Tag. If called asynchronously, returns the request thread
        instead.
    """
    # Fill in the framework's control kwargs without clobbering any
    # caller-supplied values.
    for _key, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(_key, _default)
    # Path and body parameters travel through kwargs to the endpoint.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    kwargs['tag'] = tag
    return self.call_with_http_info(**kwargs)
self.iam_organisation_role_tag_create = _Endpoint(
    settings={
        'response_type': (Tag,),
        'auth': ['BearerAuth'],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/tag',
        'operation_id': 'iam_organisation_role_tag_create',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': ['organisation_id', 'role_id', 'tag'],
        'required': ['organisation_id', 'role_id', 'tag'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'organisation_id': (str,),
            'role_id': (str,),
            'tag': (Tag,),
        },
        # The body parameter has no attribute-name mapping.
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'tag': 'body',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': ['application/json'],
    },
    api_client=api_client,
    callable=__iam_organisation_role_tag_create
)
def __iam_organisation_role_tag_delete(
    self,
    organisation_id,
    role_id,
    tag_id,
    **kwargs
):
    """Delete iam/role.tag  # noqa: E501

    Synchronous by default; pass async_req=True to make an
    asynchronous HTTP request.

    >>> thread = api.iam_organisation_role_tag_delete(organisation_id, role_id, tag_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        tag_id (str): tagId

    Keyword Args:
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        None. If called asynchronously, returns the request thread
        instead.
    """
    # Fill in the framework's control kwargs without clobbering any
    # caller-supplied values.
    for _key, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(_key, _default)
    # Path parameters travel through kwargs to the endpoint machinery.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    kwargs['tag_id'] = tag_id
    return self.call_with_http_info(**kwargs)
self.iam_organisation_role_tag_delete = _Endpoint(
    settings={
        # The delete operation yields no response body.
        'response_type': None,
        'auth': ['BearerAuth'],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/tag/{tagId}',
        'operation_id': 'iam_organisation_role_tag_delete',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': ['organisation_id', 'role_id', 'tag_id'],
        'required': ['organisation_id', 'role_id', 'tag_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'organisation_id': (str,),
            'role_id': (str,),
            'tag_id': (str,),
        },
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
            'tag_id': 'tagId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'tag_id': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_tag_delete
)
def __iam_organisation_role_tag_get(
    self,
    organisation_id,
    role_id,
    tag_id,
    **kwargs
):
    """Get iam/role.tag  # noqa: E501

    Synchronous by default; pass async_req=True to make an
    asynchronous HTTP request.

    >>> thread = api.iam_organisation_role_tag_get(organisation_id, role_id, tag_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        tag_id (str): tagId

    Keyword Args:
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        Tag. If called asynchronously, returns the request thread
        instead.
    """
    # Fill in the framework's control kwargs without clobbering any
    # caller-supplied values.
    for _key, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(_key, _default)
    # Path parameters travel through kwargs to the endpoint machinery.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    kwargs['tag_id'] = tag_id
    return self.call_with_http_info(**kwargs)
self.iam_organisation_role_tag_get = _Endpoint(
    settings={
        'response_type': (Tag,),
        'auth': ['BearerAuth'],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/tag/{tagId}',
        'operation_id': 'iam_organisation_role_tag_get',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['organisation_id', 'role_id', 'tag_id'],
        'required': ['organisation_id', 'role_id', 'tag_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'organisation_id': (str,),
            'role_id': (str,),
            'tag_id': (str,),
        },
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
            'tag_id': 'tagId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'tag_id': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_tag_get
)
def __iam_organisation_role_tag_list(
    self,
    organisation_id,
    role_id,
    **kwargs
):
    """List iam/role.tag  # noqa: E501

    Synchronous by default; pass async_req=True to make an
    asynchronous HTTP request.

    >>> thread = api.iam_organisation_role_tag_list(organisation_id, role_id, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id

    Keyword Args:
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [Tag]. If called asynchronously, returns the request thread
        instead.
    """
    # Fill in the framework's control kwargs without clobbering any
    # caller-supplied values.
    for _key, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(_key, _default)
    # Path parameters travel through kwargs to the endpoint machinery.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    return self.call_with_http_info(**kwargs)
self.iam_organisation_role_tag_list = _Endpoint(
    settings={
        'response_type': ([Tag],),
        'auth': ['BearerAuth'],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/tag',
        'operation_id': 'iam_organisation_role_tag_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['organisation_id', 'role_id'],
        'required': ['organisation_id', 'role_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'organisation_id': (str,),
            'role_id': (str,),
        },
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__iam_organisation_role_tag_list
)
def __iam_organisation_role_tag_put(
    self,
    organisation_id,
    role_id,
    tag_array,
    **kwargs
):
    """Replace iam/role.tag  # noqa: E501

    Synchronous by default; pass async_req=True to make an
    asynchronous HTTP request.

    >>> thread = api.iam_organisation_role_tag_put(organisation_id, role_id, tag_array, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        tag_array (TagArray): request body

    Keyword Args:
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [Tag]. If called asynchronously, returns the request thread
        instead.
    """
    # Fill in the framework's control kwargs without clobbering any
    # caller-supplied values.
    for _key, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(_key, _default)
    # Path and body parameters travel through kwargs to the endpoint.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    kwargs['tag_array'] = tag_array
    return self.call_with_http_info(**kwargs)
self.iam_organisation_role_tag_put = _Endpoint(
    settings={
        'response_type': ([Tag],),
        'auth': ['BearerAuth'],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}/tag',
        'operation_id': 'iam_organisation_role_tag_put',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': ['organisation_id', 'role_id', 'tag_array'],
        'required': ['organisation_id', 'role_id', 'tag_array'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'organisation_id': (str,),
            'role_id': (str,),
            'tag_array': (TagArray,),
        },
        # The body parameter has no attribute-name mapping.
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'tag_array': 'body',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': ['application/json'],
    },
    api_client=api_client,
    callable=__iam_organisation_role_tag_put
)
def __iam_organisation_role_update(
    self,
    organisation_id,
    role_id,
    iam_project_role_update,
    **kwargs
):
    """Update iam/role  # noqa: E501

    Returns modified role. This method makes a synchronous HTTP request
    by default; pass async_req=True for an asynchronous request that
    returns the request thread instead.

    >>> thread = api.iam_organisation_role_update(organisation_id, role_id, iam_project_role_update, async_req=True)
    >>> result = thread.get()

    Args:
        organisation_id (str): Organisation Id
        role_id (str): Role Id
        iam_project_role_update (IamProjectRoleUpdate): update payload

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        Role — or the request thread when called asynchronously.
    """
    # Fill in the standard transport options without overriding any
    # value the caller passed explicitly.
    transport_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    )
    for option, default in transport_defaults:
        kwargs.setdefault(option, default)
    # The positional parameters travel through kwargs as well.
    kwargs['organisation_id'] = organisation_id
    kwargs['role_id'] = role_id
    kwargs['iam_project_role_update'] = iam_project_role_update
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PATCH /iam/organisation/{organisationId}/role/{roleId}.
# Maps python parameter names to their HTTP locations (path vs body) and wires
# the validation metadata to the closure defined above; response type is Role.
self.iam_organisation_role_update = _Endpoint(
    settings={
        'response_type': (Role,),
        'auth': [
            'BearerAuth'
        ],
        'endpoint_path': '/iam/organisation/{organisationId}/role/{roleId}',
        'operation_id': 'iam_organisation_role_update',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'organisation_id',
            'role_id',
            'iam_project_role_update',
        ],
        'required': [
            'organisation_id',
            'role_id',
            'iam_project_role_update',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'organisation_id':
                (str,),
            'role_id':
                (str,),
            'iam_project_role_update':
                (IamProjectRoleUpdate,),
        },
        # python name -> wire (camelCase) name for path parameters
        'attribute_map': {
            'organisation_id': 'organisationId',
            'role_id': 'roleId',
        },
        'location_map': {
            'organisation_id': 'path',
            'role_id': 'path',
            'iam_project_role_update': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__iam_organisation_role_update
)
| 36.940045
| 129
| 0.454111
| 8,178
| 97,965
| 5.147347
| 0.026535
| 0.069509
| 0.019385
| 0.040385
| 0.953177
| 0.930562
| 0.894809
| 0.890082
| 0.877967
| 0.877967
| 0
| 0.003309
| 0.463319
| 97,965
| 2,651
| 130
| 36.95398
| 0.797341
| 0.300628
| 0
| 0.705722
| 1
| 0
| 0.236937
| 0.051692
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010899
| false
| 0
| 0.007629
| 0
| 0.029428
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
918645a5bc3b5d7c77c244178a16776f68d05f3b
| 4,400
|
py
|
Python
|
examples/legacy_examples/dagster_examples/common/resources.py
|
bitdotioinc/dagster
|
4fe395a37b206b1a48b956fa5dd72bf698104cca
|
[
"Apache-2.0"
] | 1
|
2021-04-27T19:49:59.000Z
|
2021-04-27T19:49:59.000Z
|
examples/legacy_examples/dagster_examples/common/resources.py
|
bitdotioinc/dagster
|
4fe395a37b206b1a48b956fa5dd72bf698104cca
|
[
"Apache-2.0"
] | 7
|
2022-03-16T06:55:04.000Z
|
2022-03-18T07:03:25.000Z
|
examples/legacy_examples/dagster_examples/common/resources.py
|
bitdotioinc/dagster
|
4fe395a37b206b1a48b956fa5dd72bf698104cca
|
[
"Apache-2.0"
] | null | null | null |
from collections import namedtuple
import sqlalchemy
from dagster import Field, IntSource, StringSource, resource
# Bundle of database connection info handed to pipelines: a SQLAlchemy engine,
# plain and JDBC URLs, the dialect name, a load_table(data_frame, table_name)
# callable, and the host / database-name identifiers.
DbInfo = namedtuple("DbInfo", "engine url jdbc_url dialect load_table host db_name")
def create_redshift_db_url(username, password, hostname, port, db_name, jdbc=True):
    """Build a connection URL for a Redshift cluster.

    Returns a JDBC (PostgreSQL-driver) URL when ``jdbc`` is true, otherwise a
    SQLAlchemy ``redshift+psycopg2`` URL with inline credentials.
    """
    if jdbc:
        # JDBC form carries the credentials as query parameters.
        return (
            f"jdbc:postgresql://{hostname}:{port}/{db_name}?"
            f"user={username}&password={password}"
        )
    return f"redshift+psycopg2://{username}:{password}@{hostname}:{port}/{db_name}"
def create_redshift_engine(db_url):
    """Create a SQLAlchemy engine for the given Redshift URL."""
    return sqlalchemy.create_engine(db_url)
def create_postgres_db_url(username, password, hostname, port, db_name, jdbc=True):
    """Build a connection URL for a PostgreSQL database.

    Returns a JDBC URL when ``jdbc`` is true, otherwise a plain
    ``postgresql://`` URL with inline credentials.
    """
    if jdbc:
        # JDBC form carries the credentials as query parameters.
        return (
            f"jdbc:postgresql://{hostname}:{port}/{db_name}?"
            f"user={username}&password={password}"
        )
    return f"postgresql://{username}:{password}@{hostname}:{port}/{db_name}"
def create_postgres_engine(db_url):
    """Create a SQLAlchemy engine for the given PostgreSQL URL."""
    return sqlalchemy.create_engine(db_url)
@resource(
    {
        "username": Field(StringSource),
        "password": Field(StringSource),
        "hostname": Field(StringSource),
        "port": Field(IntSource, is_required=False, default_value=5439),
        "db_name": Field(StringSource),
        "s3_temp_dir": Field(str),
    }
)
def redshift_db_info_resource(init_context):
    """Dagster resource exposing a DbInfo bundle for a Redshift cluster.

    Builds both JDBC and SQLAlchemy URLs from the resource config and
    provides a Spark-based bulk loader that stages data through S3.
    """
    cfg = init_context.resource_config
    connection_kwargs = dict(
        username=cfg["username"],
        password=cfg["password"],
        hostname=cfg["hostname"],
        port=cfg["port"],
        db_name=cfg["db_name"],
    )
    jdbc_url = create_redshift_db_url(**connection_kwargs)
    sqlalchemy_url = create_redshift_db_url(jdbc=False, **connection_kwargs)
    temp_dir = cfg["s3_temp_dir"]

    def _load_table(data_frame, table_name):
        # Spark -> Redshift bulk load via the databricks connector,
        # staging through the configured S3 temp directory.
        data_frame.write.format("com.databricks.spark.redshift").option(
            "tempdir", temp_dir
        ).mode("overwrite").jdbc(jdbc_url, table_name)

    return DbInfo(
        url=sqlalchemy_url,
        jdbc_url=jdbc_url,
        engine=create_redshift_engine(sqlalchemy_url),
        dialect="redshift",
        load_table=_load_table,
        host=cfg["hostname"],
        db_name=cfg["db_name"],
    )
@resource(
    {
        "username": Field(StringSource),
        "password": Field(StringSource),
        "hostname": Field(StringSource),
        "port": Field(IntSource, is_required=False, default_value=5432),
        "db_name": Field(StringSource),
    }
)
def postgres_db_info_resource(init_context):
    """Dagster resource exposing a DbInfo bundle for a PostgreSQL database.

    Builds both JDBC and plain URLs from the resource config and provides
    a Spark-based loader that writes through the PostgreSQL JDBC driver.
    """
    cfg = init_context.resource_config
    connection_kwargs = dict(
        username=cfg["username"],
        password=cfg["password"],
        hostname=cfg["hostname"],
        port=cfg["port"],
        db_name=cfg["db_name"],
    )
    jdbc_url = create_postgres_db_url(**connection_kwargs)
    plain_url = create_postgres_db_url(jdbc=False, **connection_kwargs)

    def _load_table(data_frame, table_name):
        # Spark -> PostgreSQL write using the stock JDBC driver.
        data_frame.write.option("driver", "org.postgresql.Driver").mode("overwrite").jdbc(
            jdbc_url, table_name
        )

    return DbInfo(
        url=plain_url,
        jdbc_url=jdbc_url,
        engine=create_postgres_engine(plain_url),
        dialect="postgres",
        load_table=_load_table,
        host=cfg["hostname"],
        db_name=cfg["db_name"],
    )
| 31.205674
| 99
| 0.656364
| 519
| 4,400
| 5.246628
| 0.123314
| 0.072714
| 0.118619
| 0.156078
| 0.837679
| 0.806463
| 0.806463
| 0.806463
| 0.806463
| 0.774146
| 0
| 0.003819
| 0.226364
| 4,400
| 140
| 100
| 31.428571
| 0.796122
| 0
| 0
| 0.586207
| 0
| 0
| 0.1475
| 0.077955
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0.137931
| 0.025862
| 0.017241
| 0.146552
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
37d88691e2ef2d991189d63b8a5bcd3935f64c50
| 1,162
|
py
|
Python
|
rosalind/1-counting-dna-nucleotides/counting.py
|
rodrishud/bioinformatics
|
968f9e09689227f0c52e46c2cc64ebd18129185b
|
[
"MIT"
] | 1
|
2021-11-05T12:25:54.000Z
|
2021-11-05T12:25:54.000Z
|
rosalind/1-counting-dna-nucleotides/counting.py
|
rodrishud/bioinformatics
|
968f9e09689227f0c52e46c2cc64ebd18129185b
|
[
"MIT"
] | 1
|
2021-11-05T12:48:22.000Z
|
2021-11-05T12:48:22.000Z
|
rosalind/1-counting-dna-nucleotides/counting.py
|
rodrishud/bioinformatics
|
968f9e09689227f0c52e46c2cc64ebd18129185b
|
[
"MIT"
] | 1
|
2021-11-05T12:26:03.000Z
|
2021-11-05T12:26:03.000Z
|
# Rosalind problem 1: report the number of A, C, G and T occurrences
# in a DNA string, printed as "A C G T" counts.
dna = "TTAGCTAACATCCCTGTCCTCCGTTGCCCGGATGAATGCGTTTCGATACCATGGTCAACCTCTGTTTTTTGCGAGTATATCTACAAATTGCCAGAAAGTAACGAGGTCGGCCTCCCAACTTCTTTTGAGCGTCAGCAAGCGTACAACAAGCACTAGTGCGGCGATCAAGTCTGAGACTGCCGCGGGGACCCTATCGTGTCAAACGCGAACATTCTCTTAGGTGATGGCAACGAACAGATGTTACGCCCTGCGCTCTTAACTAGGAGGCTTGTGGAGGTACACAACGTGATGTCTGTGAAGATCCCCGAAGTAGCTCGGTGCACCGAAAAAAAGTCTCTGTCTTACGAGACACTGGTCCGATCATCCGGGGATAACTTCCGGGCAGCTTGCCTCAATGGGGCGCTTGCAAGGTCGGATCTGCGCTTATCTTACATGTGCGAATGGAGTTAACCAGTGCGCAAACCCGGGTCATTGGCCGATAGGGTAGATAACAGTACGCATACAGCTTCCGGTGATGGCACACCGTTATCGACCACTCCGTCTACACCAACAGTTCTGGCGGAGTGAGCGGAGTGAACGGCTTACTTACAAAGTGGCCCAATTCAAGCTGGGACGGTTAACTTCAACCGCCCGTTAAAGTAGCGTAGTGGCTATACTCGCCCACTAGGCGTCTCCGCTATTGTATGGGAGTATTTGCAGAGAAGTACGAGAAACAGATCGAGTATTGCTCCGTTTAACGCGGAATTTCCCCCCTGGAGGGAGGTATTTAACCATGGCTTCGGTGTACGGCGCTTTTACTTCCTGGTTACGCAAAGACGAGCCGCCCATCGTTTAACCTGCGCCATTCGTGGTATCCAGAAGATAAGAAAGCTTATACTGTGACCATAAGCTTTAGCCGCTACTTTCTATACATATATTTATCCGTGGTACTCATTGCTAAGTTGTAGGTTGGCAGGGCTGACCTTTGACCCTTGCAAAGCGATGGCCCAA"
# One pass per base, kept as strings to match the printed format.
_counts = {base: str(dna.count(base)) for base in "ATCG"}
countA, countT, countC, countG = _counts["A"], _counts["T"], _counts["C"], _counts["G"]
print(f'{countA} {countC} {countG} {countT}')
| 145.25
| 998
| 0.942341
| 28
| 1,162
| 39.107143
| 0.5
| 0.021918
| 0.040183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017212
| 1,162
| 8
| 999
| 145.25
| 0.958844
| 0
| 0
| 0
| 0
| 0
| 0.884781
| 0.851247
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37ff392fba5735bd307430769ee93b17ef2951e2
| 22,195
|
py
|
Python
|
memsource_cli/api/segmentation_rules_api.py
|
unofficial-memsource/memsource-cli-client
|
a6639506b74e95476da87f4375953448b76ea90c
|
[
"Apache-2.0"
] | 16
|
2019-09-25T00:20:38.000Z
|
2021-05-04T05:56:10.000Z
|
memsource_cli/api/segmentation_rules_api.py
|
zerodayz/memsource-cli-client
|
c2574f1467539a49e6637c874e88d75c7ef789b3
|
[
"Apache-2.0"
] | 26
|
2019-09-30T14:00:03.000Z
|
2021-05-12T11:15:18.000Z
|
memsource_cli/api/segmentation_rules_api.py
|
zerodayz/memsource-cli-client
|
c2574f1467539a49e6637c874e88d75c7ef789b3
|
[
"Apache-2.0"
] | 1
|
2021-05-24T16:19:14.000Z
|
2021-05-24T16:19:14.000Z
|
# coding: utf-8
"""
Memsource REST API
Welcome to Memsource's API documentation. To view our legacy APIs please [visit our documentation](https://wiki.memsource.com/wiki/Memsource_API) and for more information about our new APIs, [visit our blog](https://www.memsource.com/blog/2017/10/24/introducing-rest-apis-qa-with-the-memsource-api-team/). If you have any questions, please contact [Memsource Support](<mailto:support@memsource.com>). # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from memsource_cli.api_client import ApiClient
class SegmentationRulesApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen

    CRUD operations for Memsource segmentation rules. Each operation has a
    thin public wrapper plus a ``*_with_http_info`` variant that performs
    parameter validation and issues the actual HTTP call.
    """

    def __init__(self, api_client=None):
        # Fall back to a default ApiClient when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def create_segmentation_rule(self, body, seg_rule, **kwargs):  # noqa: E501
        """Create segmentation rule  # noqa: E501

        Creates new Segmentation Rule with file and segRule JSON Object as header parameter. The same object is used for GET action.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_segmentation_rule(body, seg_rule, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param InputStream body: streamed file (required)
        :param str seg_rule: (required)
        :return: SegmentationRuleDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.create_segmentation_rule_with_http_info(body, seg_rule, **kwargs)  # noqa: E501
        else:
            (data) = self.create_segmentation_rule_with_http_info(body, seg_rule, **kwargs)  # noqa: E501
            return data

    def create_segmentation_rule_with_http_info(self, body, seg_rule, **kwargs):  # noqa: E501
        """Create segmentation rule  # noqa: E501

        Creates new Segmentation Rule with file and segRule JSON Object as header parameter. The same object is used for GET action.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_segmentation_rule_with_http_info(body, seg_rule, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param InputStream body: streamed file (required)
        :param str seg_rule: (required)
        :return: SegmentationRuleDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of accepted keyword arguments; anything else is a TypeError.
        all_params = ['body', 'seg_rule']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshot is used as the working parameter dict.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_segmentation_rule" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `create_segmentation_rule`")  # noqa: E501
        # verify the required parameter 'seg_rule' is set
        if ('seg_rule' not in params or
                params['seg_rule'] is None):
            raise ValueError("Missing the required parameter `seg_rule` when calling `create_segmentation_rule`")  # noqa: E501

        # segRule header value is length-constrained by the API spec (0..255).
        if ('seg_rule' in params and
                len(params['seg_rule']) > 255):
            raise ValueError("Invalid value for parameter `seg_rule` when calling `create_segmentation_rule`, length must be less than or equal to `255`")  # noqa: E501
        if ('seg_rule' in params and
                len(params['seg_rule']) < 0):
            raise ValueError("Invalid value for parameter `seg_rule` when calling `create_segmentation_rule`, length must be greater than or equal to `0`")  # noqa: E501
        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}
        if 'seg_rule' in params:
            header_params['segRule'] = params['seg_rule']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/octet-stream'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api2/v1/segmentationRules', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SegmentationRuleDto',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def deletes_segmentation_rule(self, seg_rule_id, **kwargs):  # noqa: E501
        """Delete segmentation rule  # noqa: E501

          # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.deletes_segmentation_rule(seg_rule_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int seg_rule_id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.deletes_segmentation_rule_with_http_info(seg_rule_id, **kwargs)  # noqa: E501
        else:
            (data) = self.deletes_segmentation_rule_with_http_info(seg_rule_id, **kwargs)  # noqa: E501
            return data

    def deletes_segmentation_rule_with_http_info(self, seg_rule_id, **kwargs):  # noqa: E501
        """Delete segmentation rule  # noqa: E501

          # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.deletes_segmentation_rule_with_http_info(seg_rule_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int seg_rule_id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['seg_rule_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method deletes_segmentation_rule" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'seg_rule_id' is set
        if ('seg_rule_id' not in params or
                params['seg_rule_id'] is None):
            raise ValueError("Missing the required parameter `seg_rule_id` when calling `deletes_segmentation_rule`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'seg_rule_id' in params:
            path_params['segRuleId'] = params['seg_rule_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api2/v1/segmentationRules/{segRuleId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_list_of_segmentation_rules(self, **kwargs):  # noqa: E501
        """List segmentation rules  # noqa: E501

          # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_list_of_segmentation_rules(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int page_number: Page number, starting with 0, default 0
        :param int page_size: Page size, accepts values between 1 and 50, default 50
        :return: PageDtoSegmentationRuleReference
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_list_of_segmentation_rules_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_list_of_segmentation_rules_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_list_of_segmentation_rules_with_http_info(self, **kwargs):  # noqa: E501
        """List segmentation rules  # noqa: E501

          # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_list_of_segmentation_rules_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int page_number: Page number, starting with 0, default 0
        :param int page_size: Page size, accepts values between 1 and 50, default 50
        :return: PageDtoSegmentationRuleReference
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['page_number', 'page_size']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_list_of_segmentation_rules" % key
                )
            params[key] = val
        del params['kwargs']

        # Pagination bounds from the API spec: page_number >= 0, 1 <= page_size <= 50.
        if 'page_number' in params and params['page_number'] < 0:  # noqa: E501
            raise ValueError("Invalid value for parameter `page_number` when calling `get_list_of_segmentation_rules`, must be a value greater than or equal to `0`")  # noqa: E501
        if 'page_size' in params and params['page_size'] > 50:  # noqa: E501
            raise ValueError("Invalid value for parameter `page_size` when calling `get_list_of_segmentation_rules`, must be a value less than or equal to `50`")  # noqa: E501
        if 'page_size' in params and params['page_size'] < 1:  # noqa: E501
            raise ValueError("Invalid value for parameter `page_size` when calling `get_list_of_segmentation_rules`, must be a value greater than or equal to `1`")  # noqa: E501
        collection_formats = {}

        path_params = {}

        query_params = []
        if 'page_number' in params:
            query_params.append(('pageNumber', params['page_number']))  # noqa: E501
        if 'page_size' in params:
            query_params.append(('pageSize', params['page_size']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api2/v1/segmentationRules', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PageDtoSegmentationRuleReference',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_segmentation_rule(self, seg_rule_id, **kwargs):  # noqa: E501
        """Get segmentation rule  # noqa: E501

          # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_segmentation_rule(seg_rule_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int seg_rule_id: (required)
        :return: SegmentationRuleDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_segmentation_rule_with_http_info(seg_rule_id, **kwargs)  # noqa: E501
        else:
            (data) = self.get_segmentation_rule_with_http_info(seg_rule_id, **kwargs)  # noqa: E501
            return data

    def get_segmentation_rule_with_http_info(self, seg_rule_id, **kwargs):  # noqa: E501
        """Get segmentation rule  # noqa: E501

          # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_segmentation_rule_with_http_info(seg_rule_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int seg_rule_id: (required)
        :return: SegmentationRuleDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['seg_rule_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_segmentation_rule" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'seg_rule_id' is set
        if ('seg_rule_id' not in params or
                params['seg_rule_id'] is None):
            raise ValueError("Missing the required parameter `seg_rule_id` when calling `get_segmentation_rule`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'seg_rule_id' in params:
            path_params['segRuleId'] = params['seg_rule_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api2/v1/segmentationRules/{segRuleId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SegmentationRuleDto',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def updates_segmentation_rule(self, seg_rule_id, **kwargs):  # noqa: E501
        """Edit segmentation rule  # noqa: E501

          # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.updates_segmentation_rule(seg_rule_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int seg_rule_id: (required)
        :param EditSegmentationRuleDto body:
        :return: SegmentationRuleDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.updates_segmentation_rule_with_http_info(seg_rule_id, **kwargs)  # noqa: E501
        else:
            (data) = self.updates_segmentation_rule_with_http_info(seg_rule_id, **kwargs)  # noqa: E501
            return data

    def updates_segmentation_rule_with_http_info(self, seg_rule_id, **kwargs):  # noqa: E501
        """Edit segmentation rule  # noqa: E501

          # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.updates_segmentation_rule_with_http_info(seg_rule_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int seg_rule_id: (required)
        :param EditSegmentationRuleDto body:
        :return: SegmentationRuleDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['seg_rule_id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method updates_segmentation_rule" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'seg_rule_id' is set
        if ('seg_rule_id' not in params or
                params['seg_rule_id'] is None):
            raise ValueError("Missing the required parameter `seg_rule_id` when calling `updates_segmentation_rule`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'seg_rule_id' in params:
            path_params['segRuleId'] = params['seg_rule_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api2/v1/segmentationRules/{segRuleId}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SegmentationRuleDto',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 40.950185
| 421
| 0.622618
| 2,615
| 22,195
| 5.018356
| 0.084512
| 0.051817
| 0.030862
| 0.027433
| 0.921359
| 0.906348
| 0.895908
| 0.878915
| 0.860245
| 0.851787
| 0
| 0.019579
| 0.288939
| 22,195
| 541
| 422
| 41.025878
| 0.811938
| 0.321604
| 0
| 0.742268
| 0
| 0.017182
| 0.226686
| 0.068795
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037801
| false
| 0
| 0.013746
| 0
| 0.106529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5329da10ec194f3df034ca2df34d3629e7d0fe41
| 76,072
|
py
|
Python
|
saleor/graphql/meta/tests/test_meta_queries.py
|
angeles-ricardo-89/saleor
|
5fab7a883d025bff83320fbdd557ed7afa2923a9
|
[
"BSD-3-Clause"
] | 3
|
2019-01-24T11:41:58.000Z
|
2019-11-10T13:12:24.000Z
|
saleor/graphql/meta/tests/test_meta_queries.py
|
DuongHieuMAI/saleor
|
e20b6283182f3a2886fe36fcdef8e47e4fcf7a14
|
[
"CC-BY-4.0"
] | 11
|
2021-03-30T14:26:57.000Z
|
2022-03-12T00:51:07.000Z
|
saleor/graphql/meta/tests/test_meta_queries.py
|
DuongHieuMAI/saleor
|
e20b6283182f3a2886fe36fcdef8e47e4fcf7a14
|
[
"CC-BY-4.0"
] | 12
|
2019-03-21T03:24:58.000Z
|
2022-01-13T10:55:34.000Z
|
import graphene
from ...tests.utils import assert_no_permission, get_graphql_content
# Opaque metadata fixtures shared by the tests below.
PRIVATE_KEY = "private_key"
# NOTE(review): "private_vale" looks like a typo for "private_value"; harmless
# here because the value is only compared against itself — confirm before fixing.
PRIVATE_VALUE = "private_vale"
PUBLIC_KEY = "key"
PUBLIC_VALUE = "value"
# GraphQL query: public metadata of the currently authenticated user (`me`).
QUERY_SELF_PUBLIC_META = """
{
me{
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_me_as_customer(user_api_client):
    """A customer can read their own public metadata via the `me` query."""
    # given: the customer carries one public metadata entry
    customer = user_api_client.user
    customer.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    customer.save(update_fields=["metadata"])

    # when
    content = get_graphql_content(
        user_api_client.post_graphql(QUERY_SELF_PUBLIC_META)
    )

    # then
    entry = content["data"]["me"]["metadata"][0]
    assert entry["key"] == PUBLIC_KEY
    assert entry["value"] == PUBLIC_VALUE
def test_query_public_meta_for_me_as_staff(staff_api_client):
    """A staff user can read their own public metadata via the `me` query."""
    # given: the staff user carries one public metadata entry
    staff_user = staff_api_client.user
    staff_user.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    staff_user.save(update_fields=["metadata"])

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(QUERY_SELF_PUBLIC_META)
    )

    # then
    entry = content["data"]["me"]["metadata"][0]
    assert entry["key"] == PUBLIC_KEY
    assert entry["value"] == PUBLIC_VALUE
# GraphQL query: public metadata of an arbitrary user, looked up by global ID.
QUERY_USER_PUBLIC_META = """
query userMeta($id: ID!){
user(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_customer_as_staff(
    staff_api_client, permission_manage_users, customer_user
):
    """Staff with manage-users permission can read a customer's public metadata."""
    # given
    customer_user.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    customer_user.save(update_fields=["metadata"])
    user_id = graphene.Node.to_global_id("User", customer_user.pk)

    # when
    response = staff_api_client.post_graphql(
        QUERY_USER_PUBLIC_META, {"id": user_id}, [permission_manage_users]
    )
    content = get_graphql_content(response)

    # then
    entry = content["data"]["user"]["metadata"][0]
    assert entry["key"] == PUBLIC_KEY
    assert entry["value"] == PUBLIC_VALUE
def test_query_public_meta_for_customer_as_app(
    app_api_client, permission_manage_users, customer_user
):
    """An app holding MANAGE_USERS can read a customer's public metadata."""
    # given
    customer_user.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    customer_user.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("User", customer_user.pk)

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_USER_PUBLIC_META, {"id": node_id}, [permission_manage_users]
        )
    )

    # then
    item = content["data"]["user"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_staff_as_other_staff(
    staff_api_client, permission_manage_staff, admin_user
):
    """Staff holding MANAGE_STAFF can read another staff user's public metadata."""
    # given
    admin_user.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    admin_user.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("User", admin_user.pk)

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_USER_PUBLIC_META, {"id": node_id}, [permission_manage_staff]
        )
    )

    # then
    item = content["data"]["user"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_staff_as_app(
    app_api_client, permission_manage_staff, admin_user
):
    """Apps may not read staff users' metadata, even with MANAGE_STAFF."""
    # given
    admin_user.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    admin_user.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("User", admin_user.pk)

    # when
    response = app_api_client.post_graphql(
        QUERY_USER_PUBLIC_META, {"id": node_id}, [permission_manage_staff]
    )

    # then
    assert_no_permission(response)
QUERY_CHECKOUT_PUBLIC_META = """
query checkoutMeta($token: UUID!){
checkout(token: $token){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_checkout_as_anonymous_user(api_client, checkout):
    """An unowned checkout exposes its public metadata to anonymous users."""
    # given
    checkout.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    checkout.save(update_fields=["metadata"])

    # when
    content = get_graphql_content(
        api_client.post_graphql(QUERY_CHECKOUT_PUBLIC_META, {"token": checkout.pk})
    )

    # then
    item = content["data"]["checkout"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_other_customer_checkout_as_anonymous_user(
    api_client, checkout, customer_user
):
    """A checkout owned by some customer is hidden from anonymous users."""
    # given: assign the checkout to a customer before querying anonymously
    checkout.user = customer_user
    checkout.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    checkout.save(update_fields=["user", "metadata"])

    # when
    content = get_graphql_content(
        api_client.post_graphql(QUERY_CHECKOUT_PUBLIC_META, {"token": checkout.pk})
    )

    # then: the checkout node is not resolved at all
    assert not content["data"]["checkout"]
def test_query_public_meta_for_checkout_as_customer(user_api_client, checkout):
    """A customer can read public metadata of their own checkout."""
    # given
    checkout.user = user_api_client.user
    checkout.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    checkout.save(update_fields=["user", "metadata"])

    # when
    content = get_graphql_content(
        user_api_client.post_graphql(
            QUERY_CHECKOUT_PUBLIC_META, {"token": checkout.pk}
        )
    )

    # then
    item = content["data"]["checkout"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_checkout_as_staff(
    staff_api_client, checkout, customer_user, permission_manage_checkouts
):
    """Staff with MANAGE_CHECKOUTS can read another customer's checkout metadata."""
    # given
    checkout.user = customer_user
    checkout.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    checkout.save(update_fields=["user", "metadata"])

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_CHECKOUT_PUBLIC_META,
            {"token": checkout.pk},
            [permission_manage_checkouts],
            check_no_permissions=False,  # Remove after fix #5245
        )
    )

    # then
    item = content["data"]["checkout"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_checkout_as_app(
    app_api_client, checkout, customer_user, permission_manage_checkouts
):
    """An app with MANAGE_CHECKOUTS can read another customer's checkout metadata."""
    # given
    checkout.user = customer_user
    checkout.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    checkout.save(update_fields=["user", "metadata"])

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_CHECKOUT_PUBLIC_META,
            {"token": checkout.pk},
            [permission_manage_checkouts],
            check_no_permissions=False,  # Remove after fix #5245
        )
    )

    # then
    item = content["data"]["checkout"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_ORDER_BY_TOKEN_PUBLIC_META = """
query orderMeta($token: UUID!){
orderByToken(token: $token){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_order_by_token_as_anonymous_user(api_client, order):
    """Anyone holding the order token can read the order's public metadata."""
    # given
    order.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    order.save(update_fields=["metadata"])

    # when
    content = get_graphql_content(
        api_client.post_graphql(
            QUERY_ORDER_BY_TOKEN_PUBLIC_META, {"token": order.token}
        )
    )

    # then
    item = content["data"]["orderByToken"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_order_by_token_as_customer(user_api_client, order):
    """The order owner can read the order's public metadata via its token."""
    # given
    order.user = user_api_client.user
    order.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    order.save(update_fields=["user", "metadata"])

    # when
    content = get_graphql_content(
        user_api_client.post_graphql(
            QUERY_ORDER_BY_TOKEN_PUBLIC_META, {"token": order.token}
        )
    )

    # then
    item = content["data"]["orderByToken"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_order_by_token_as_staff(
    staff_api_client, order, customer_user, permission_manage_orders
):
    """Staff with MANAGE_ORDERS can read another customer's order metadata by token."""
    # given
    order.user = customer_user
    order.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    order.save(update_fields=["user", "metadata"])

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_ORDER_BY_TOKEN_PUBLIC_META,
            {"token": order.token},
            [permission_manage_orders],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["orderByToken"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_order_by_token_as_app(
    app_api_client, order, customer_user, permission_manage_orders
):
    """An app with MANAGE_ORDERS can read another customer's order metadata by token."""
    # given
    order.user = customer_user
    order.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    order.save(update_fields=["user", "metadata"])

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_ORDER_BY_TOKEN_PUBLIC_META,
            {"token": order.token},
            [permission_manage_orders],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["orderByToken"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_ORDER_PUBLIC_META = """
query orderMeta($id: ID!){
order(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_order_as_anonymous_user(api_client, order):
    """Anonymous users may not query an order by ID at all."""
    # given
    node_id = graphene.Node.to_global_id("Order", order.pk)

    # when
    response = api_client.post_graphql(QUERY_ORDER_PUBLIC_META, {"id": node_id})

    # then
    assert_no_permission(response)
def test_query_public_meta_for_order_as_customer(user_api_client, order):
    """Even the order owner may not query the order by ID (token access only)."""
    # given
    order.user = user_api_client.user
    order.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    order.save(update_fields=["user", "metadata"])
    node_id = graphene.Node.to_global_id("Order", order.pk)

    # when
    response = user_api_client.post_graphql(QUERY_ORDER_PUBLIC_META, {"id": node_id})

    # then
    assert_no_permission(response)
def test_query_public_meta_for_order_as_staff(
    staff_api_client, order, customer_user, permission_manage_orders
):
    """Staff with MANAGE_ORDERS can read an order's public metadata by ID."""
    # given
    order.user = customer_user
    order.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    order.save(update_fields=["user", "metadata"])
    node_id = graphene.Node.to_global_id("Order", order.pk)

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_ORDER_PUBLIC_META,
            {"id": node_id},
            [permission_manage_orders],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["order"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_order_as_app(
    app_api_client, order, customer_user, permission_manage_orders
):
    """An app with MANAGE_ORDERS can read an order's public metadata by ID."""
    # given
    order.user = customer_user
    order.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    order.save(update_fields=["user", "metadata"])
    node_id = graphene.Node.to_global_id("Order", order.pk)

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_ORDER_PUBLIC_META,
            {"id": node_id},
            [permission_manage_orders],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["order"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_DRAFT_ORDER_PUBLIC_META = """
query draftOrderMeta($id: ID!){
order(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_draft_order_as_anonymous_user(api_client, draft_order):
    """Anonymous users may not query a draft order by ID."""
    # given
    node_id = graphene.Node.to_global_id("Order", draft_order.pk)

    # when
    response = api_client.post_graphql(QUERY_DRAFT_ORDER_PUBLIC_META, {"id": node_id})

    # then
    assert_no_permission(response)
def test_query_public_meta_for_draft_order_as_customer(user_api_client, draft_order):
    """Customers may not query a draft order by ID, even their own."""
    # given
    draft_order.user = user_api_client.user
    draft_order.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    draft_order.save(update_fields=["user", "metadata"])
    node_id = graphene.Node.to_global_id("Order", draft_order.pk)

    # when
    response = user_api_client.post_graphql(
        QUERY_DRAFT_ORDER_PUBLIC_META, {"id": node_id}
    )

    # then
    assert_no_permission(response)
def test_query_public_meta_for_draft_order_as_staff(
    staff_api_client, draft_order, customer_user, permission_manage_orders
):
    """Staff with MANAGE_ORDERS can read a draft order's public metadata."""
    # given
    draft_order.user = customer_user
    draft_order.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    draft_order.save(update_fields=["user", "metadata"])
    node_id = graphene.Node.to_global_id("Order", draft_order.pk)

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_DRAFT_ORDER_PUBLIC_META,
            {"id": node_id},
            [permission_manage_orders],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["order"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_draft_order_as_app(
    app_api_client, draft_order, customer_user, permission_manage_orders
):
    """An app with MANAGE_ORDERS can read a draft order's public metadata.

    Fix: the test previously sent QUERY_ORDER_PUBLIC_META (copy-paste from the
    plain-order tests); it now uses QUERY_DRAFT_ORDER_PUBLIC_META like its
    sibling draft-order tests.
    """
    # given
    draft_order.user = customer_user
    draft_order.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    draft_order.save(update_fields=["user", "metadata"])
    variables = {"id": graphene.Node.to_global_id("Order", draft_order.pk)}

    # when
    response = app_api_client.post_graphql(
        QUERY_DRAFT_ORDER_PUBLIC_META,
        variables,
        [permission_manage_orders],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)

    # then
    metadata = content["data"]["order"]["metadata"][0]
    assert metadata["key"] == PUBLIC_KEY
    assert metadata["value"] == PUBLIC_VALUE
QUERY_FULFILLMENT_PUBLIC_META = """
query fulfillmentMeta($token: UUID!){
orderByToken(token: $token){
fulfillments{
metadata{
key
value
}
}
}
}
"""
def test_query_public_meta_for_fulfillment_as_anonymous_user(
    api_client, fulfilled_order
):
    """Anyone with the order token can read fulfillment public metadata."""
    # given
    fulfillment = fulfilled_order.fulfillments.first()
    fulfillment.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    fulfillment.save(update_fields=["metadata"])

    # when
    content = get_graphql_content(
        api_client.post_graphql(
            QUERY_FULFILLMENT_PUBLIC_META, {"token": fulfilled_order.token}
        )
    )

    # then
    item = content["data"]["orderByToken"]["fulfillments"][0]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_fulfillment_as_customer(
    user_api_client, fulfilled_order
):
    """The order owner can read fulfillment public metadata via the token."""
    # given
    fulfillment = fulfilled_order.fulfillments.first()
    fulfillment.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    fulfillment.save(update_fields=["metadata"])
    fulfilled_order.user = user_api_client.user
    fulfilled_order.save(update_fields=["user"])

    # when
    content = get_graphql_content(
        user_api_client.post_graphql(
            QUERY_FULFILLMENT_PUBLIC_META, {"token": fulfilled_order.token}
        )
    )

    # then
    item = content["data"]["orderByToken"]["fulfillments"][0]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_fulfillment_as_staff(
    staff_api_client, fulfilled_order, customer_user, permission_manage_orders
):
    """Staff with MANAGE_ORDERS can read fulfillment metadata of any order."""
    # given
    fulfillment = fulfilled_order.fulfillments.first()
    fulfillment.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    fulfillment.save(update_fields=["metadata"])
    fulfilled_order.user = customer_user
    fulfilled_order.save(update_fields=["user"])

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_FULFILLMENT_PUBLIC_META,
            {"token": fulfilled_order.token},
            [permission_manage_orders],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["orderByToken"]["fulfillments"][0]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_fulfillment_as_app(
    app_api_client, fulfilled_order, customer_user, permission_manage_orders
):
    """An app with MANAGE_ORDERS can read fulfillment metadata of any order."""
    # given
    fulfillment = fulfilled_order.fulfillments.first()
    fulfillment.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    fulfillment.save(update_fields=["metadata"])
    fulfilled_order.user = customer_user
    fulfilled_order.save(update_fields=["user"])

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_FULFILLMENT_PUBLIC_META,
            {"token": fulfilled_order.token},
            [permission_manage_orders],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["orderByToken"]["fulfillments"][0]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_ATTRIBUTE_PUBLIC_META = """
query attributeMeta($id: ID!){
attribute(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_attribute_as_anonymous_user(api_client, color_attribute):
    """Attribute public metadata is world-readable."""
    # given
    color_attribute.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    color_attribute.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Attribute", color_attribute.pk)

    # when
    content = get_graphql_content(
        api_client.post_graphql(QUERY_ATTRIBUTE_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["attribute"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_attribute_as_customer(user_api_client, color_attribute):
    """Customers can read attribute public metadata."""
    # given
    color_attribute.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    color_attribute.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Attribute", color_attribute.pk)

    # when
    content = get_graphql_content(
        user_api_client.post_graphql(QUERY_ATTRIBUTE_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["attribute"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_attribute_as_staff(
    staff_api_client, color_attribute, permission_manage_products
):
    """Staff can read attribute public metadata."""
    # given
    color_attribute.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    color_attribute.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Attribute", color_attribute.pk)

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_ATTRIBUTE_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["attribute"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_attribute_as_app(
    app_api_client, color_attribute, permission_manage_products
):
    """Apps can read attribute public metadata."""
    # given
    color_attribute.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    color_attribute.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Attribute", color_attribute.pk)

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_ATTRIBUTE_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["attribute"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_CATEGORY_PUBLIC_META = """
query categoryMeta($id: ID!){
category(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_category_as_anonymous_user(api_client, category):
    """Category public metadata is world-readable."""
    # given
    category.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    category.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Category", category.pk)

    # when
    content = get_graphql_content(
        api_client.post_graphql(QUERY_CATEGORY_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["category"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_category_as_customer(user_api_client, category):
    """Customers can read category public metadata."""
    # given
    category.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    category.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Category", category.pk)

    # when
    content = get_graphql_content(
        user_api_client.post_graphql(QUERY_CATEGORY_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["category"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_category_as_staff(
    staff_api_client, category, permission_manage_products
):
    """Staff can read category public metadata."""
    # given
    category.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    category.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Category", category.pk)

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_CATEGORY_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["category"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_category_as_app(
    app_api_client, category, permission_manage_products
):
    """Apps can read category public metadata."""
    # given
    category.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    category.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Category", category.pk)

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_CATEGORY_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["category"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_COLLECTION_PUBLIC_META = """
query collectionMeta($id: ID!){
collection(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_collection_as_anonymous_user(api_client, collection):
    """Collection public metadata is world-readable."""
    # given
    collection.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    collection.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Collection", collection.pk)

    # when
    content = get_graphql_content(
        api_client.post_graphql(QUERY_COLLECTION_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["collection"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_collection_as_customer(user_api_client, collection):
    """Customers can read collection public metadata."""
    # given
    collection.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    collection.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Collection", collection.pk)

    # when
    content = get_graphql_content(
        user_api_client.post_graphql(QUERY_COLLECTION_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["collection"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_collection_as_staff(
    staff_api_client, collection, permission_manage_products
):
    """Staff can read collection public metadata."""
    # given
    collection.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    collection.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Collection", collection.pk)

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_COLLECTION_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["collection"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_collection_as_app(
    app_api_client, collection, permission_manage_products
):
    """Apps can read collection public metadata."""
    # given
    collection.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    collection.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Collection", collection.pk)

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_COLLECTION_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["collection"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_DIGITAL_CONTENT_PUBLIC_META = """
query digitalContentMeta($id: ID!){
digitalContent(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_digital_content_as_anonymous_user(
    api_client, digital_content
):
    """Anonymous users may not query digital content."""
    # given
    node_id = graphene.Node.to_global_id("DigitalContent", digital_content.pk)

    # when
    response = api_client.post_graphql(
        QUERY_DIGITAL_CONTENT_PUBLIC_META, {"id": node_id}
    )

    # then
    assert_no_permission(response)
def test_query_public_meta_for_digital_content_as_customer(
    user_api_client, digital_content
):
    """Customers may not query digital content."""
    # given
    digital_content.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    digital_content.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("DigitalContent", digital_content.pk)

    # when
    response = user_api_client.post_graphql(
        QUERY_DIGITAL_CONTENT_PUBLIC_META, {"id": node_id}
    )

    # then
    assert_no_permission(response)
def test_query_public_meta_for_digital_content_as_staff(
    staff_api_client, digital_content, permission_manage_products
):
    """Staff with MANAGE_PRODUCTS can read digital content public metadata."""
    # given
    digital_content.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    digital_content.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("DigitalContent", digital_content.pk)

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_DIGITAL_CONTENT_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["digitalContent"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_digital_content_as_app(
    app_api_client, digital_content, permission_manage_products
):
    """An app with MANAGE_PRODUCTS can read digital content public metadata."""
    # given
    digital_content.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    digital_content.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("DigitalContent", digital_content.pk)

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_DIGITAL_CONTENT_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["digitalContent"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_PRODUCT_PUBLIC_META = """
query productsMeta($id: ID!){
product(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_product_as_anonymous_user(api_client, product):
    """Product public metadata is world-readable."""
    # given
    product.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    product.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Product", product.pk)

    # when
    content = get_graphql_content(
        api_client.post_graphql(QUERY_PRODUCT_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["product"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_product_as_customer(user_api_client, product):
    """Customers can read product public metadata."""
    # given
    product.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    product.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Product", product.pk)

    # when
    content = get_graphql_content(
        user_api_client.post_graphql(QUERY_PRODUCT_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["product"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_product_as_staff(
    staff_api_client, product, permission_manage_products
):
    """Staff can read product public metadata."""
    # given
    product.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    product.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Product", product.pk)

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_PRODUCT_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["product"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_product_as_app(
    app_api_client, product, permission_manage_products
):
    """Apps can read product public metadata."""
    # given
    product.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    product.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("Product", product.pk)

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_PRODUCT_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["product"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_PRODUCT_TYPE_PUBLIC_META = """
query productTypeMeta($id: ID!){
productType(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_product_type_as_anonymous_user(api_client, product_type):
    """Product type public metadata is world-readable."""
    # given
    product_type.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    product_type.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("ProductType", product_type.pk)

    # when
    content = get_graphql_content(
        api_client.post_graphql(QUERY_PRODUCT_TYPE_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["productType"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_product_type_as_customer(user_api_client, product_type):
    """Customers can read product type public metadata."""
    # given
    product_type.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    product_type.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("ProductType", product_type.pk)

    # when
    content = get_graphql_content(
        user_api_client.post_graphql(QUERY_PRODUCT_TYPE_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["productType"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_product_type_as_staff(
    staff_api_client, product_type, permission_manage_products
):
    """Staff can read product type public metadata."""
    # given
    product_type.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    product_type.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("ProductType", product_type.pk)

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_PRODUCT_TYPE_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["productType"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_product_type_as_app(
    app_api_client, product_type, permission_manage_products
):
    """Apps can read product type public metadata."""
    # given
    product_type.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    product_type.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("ProductType", product_type.pk)

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_PRODUCT_TYPE_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["productType"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_PRODUCT_VARIANT_PUBLIC_META = """
query productVariantMeta($id: ID!){
productVariant(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_product_variant_as_anonymous_user(api_client, variant):
    """Product variant public metadata is world-readable."""
    # given
    variant.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    variant.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("ProductVariant", variant.pk)

    # when
    content = get_graphql_content(
        api_client.post_graphql(QUERY_PRODUCT_VARIANT_PUBLIC_META, {"id": node_id})
    )

    # then
    item = content["data"]["productVariant"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_product_variant_as_customer(user_api_client, variant):
    """Customers can read product variant public metadata."""
    # given
    variant.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    variant.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("ProductVariant", variant.pk)

    # when
    content = get_graphql_content(
        user_api_client.post_graphql(
            QUERY_PRODUCT_VARIANT_PUBLIC_META, {"id": node_id}
        )
    )

    # then
    item = content["data"]["productVariant"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_product_variant_as_staff(
    staff_api_client, variant, permission_manage_products
):
    """Staff can read product variant public metadata."""
    # given
    variant.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    variant.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("ProductVariant", variant.pk)

    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_PRODUCT_VARIANT_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["productVariant"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
def test_query_public_meta_for_product_variant_as_app(
    app_api_client, variant, permission_manage_products
):
    """Apps can read product variant public metadata."""
    # given
    variant.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    variant.save(update_fields=["metadata"])
    node_id = graphene.Node.to_global_id("ProductVariant", variant.pk)

    # when
    content = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_PRODUCT_VARIANT_PUBLIC_META,
            {"id": node_id},
            [permission_manage_products],
            check_no_permissions=False,
        )
    )

    # then
    item = content["data"]["productVariant"]["metadata"][0]
    assert (item["key"], item["value"]) == (PUBLIC_KEY, PUBLIC_VALUE)
QUERY_APP_PUBLIC_META = """
query appMeta($id: ID!){
app(id: $id){
metadata{
key
value
}
}
}
"""
def test_query_public_meta_for_app_as_anonymous_user(api_client, app):
    """Anonymous users may not query an app."""
    # given
    node_id = graphene.Node.to_global_id("App", app.pk)

    # when
    response = api_client.post_graphql(QUERY_APP_PUBLIC_META, {"id": node_id})

    # then
    assert_no_permission(response)
def test_query_public_meta_for_app_as_customer(user_api_client, app):
    """Customers may not query an app."""
    # given
    node_id = graphene.Node.to_global_id("App", app.pk)

    # when
    response = user_api_client.post_graphql(QUERY_APP_PUBLIC_META, {"id": node_id})

    # then
    assert_no_permission(response)
def test_query_public_meta_for_app_as_staff(
    staff_api_client, app, permission_manage_apps
):
    """Staff with manage-apps permission can read an app's public metadata."""
    # given
    app.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    app.save(update_fields=["metadata"])
    variables = {"id": graphene.Node.to_global_id("App", app.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_APP_PUBLIC_META,
        variables,
        [permission_manage_apps],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["app"]["metadata"][0]
    assert metadata["key"] == PUBLIC_KEY
    assert metadata["value"] == PUBLIC_VALUE
def test_query_public_meta_for_app_as_app(app_api_client, app, permission_manage_apps):
    """An app with manage-apps permission can read an app's public metadata."""
    # given
    app.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
    app.save(update_fields=["metadata"])
    variables = {"id": graphene.Node.to_global_id("App", app.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_APP_PUBLIC_META,
        variables,
        [permission_manage_apps],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["app"]["metadata"][0]
    assert metadata["key"] == PUBLIC_KEY
    assert metadata["value"] == PUBLIC_VALUE
QUERY_SELF_PRIVATE_META = """
{
me{
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_me_as_customer(user_api_client):
    """A customer may not read their own private metadata."""
    # given
    # when
    response = user_api_client.post_graphql(QUERY_SELF_PRIVATE_META)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_me_as_staff_with_manage_customer(
    staff_api_client, permission_manage_users
):
    """Manage-users permission alone does not grant access to own private metadata."""
    # given
    # when
    response = staff_api_client.post_graphql(
        QUERY_SELF_PRIVATE_META, None, [permission_manage_users]
    )
    # then
    assert_no_permission(response)
def test_query_private_meta_for_me_as_staff_with_manage_staff(
    staff_api_client, permission_manage_staff
):
    """Staff with manage-staff permission can read their own private metadata."""
    # given
    me = staff_api_client.user
    me.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    me.save(update_fields=["private_metadata"])
    # when
    response = staff_api_client.post_graphql(
        QUERY_SELF_PRIVATE_META, None, [permission_manage_staff]
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["me"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_USER_PRIVATE_META = """
query userMeta($id: ID!){
user(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_customer_as_staff(
    staff_api_client, permission_manage_users, customer_user
):
    """Staff with manage-users permission can read a customer's private metadata."""
    # given
    customer_user.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    customer_user.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("User", customer_user.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_USER_PRIVATE_META, variables, [permission_manage_users]
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["user"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_customer_as_app(
    app_api_client, permission_manage_users, customer_user
):
    """An app with manage-users permission can read a customer's private metadata."""
    # given
    customer_user.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    customer_user.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("User", customer_user.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_USER_PRIVATE_META, variables, [permission_manage_users]
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["user"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_staff_as_other_staff(
    staff_api_client, permission_manage_staff, admin_user
):
    """Staff with manage-staff permission can read another staff user's private metadata."""
    # given
    admin_user.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    admin_user.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("User", admin_user.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_USER_PRIVATE_META, variables, [permission_manage_staff]
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["user"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_staff_as_app(
    app_api_client, permission_manage_staff, admin_user
):
    """An app may not read a staff user's private metadata, even with manage-staff."""
    # given
    admin_user.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    admin_user.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("User", admin_user.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_USER_PRIVATE_META, variables, [permission_manage_staff]
    )
    # then
    assert_no_permission(response)
QUERY_CHECKOUT_PRIVATE_META = """
query checkoutMeta($token: UUID!){
checkout(token: $token){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_checkout_as_anonymous_user(api_client, checkout):
    """Anonymous users may not read a checkout's private metadata."""
    # given
    token = checkout.pk
    # when
    response = api_client.post_graphql(
        QUERY_CHECKOUT_PRIVATE_META, {"token": token}
    )
    # then
    assert_no_permission(response)
def test_query_private_meta_for_other_customer_checkout_as_anonymous_user(
    api_client, checkout, customer_user
):
    """Anonymous users get no checkout object at all for another customer's checkout."""
    # given
    checkout.user = customer_user
    checkout.save(update_fields=["user"])
    variables = {"token": checkout.pk}
    # when
    response = api_client.post_graphql(QUERY_CHECKOUT_PRIVATE_META, variables)
    content = get_graphql_content(response)
    # then
    # The checkout is hidden entirely (null) rather than raising a permission error.
    assert not content["data"]["checkout"]
def test_query_private_meta_for_checkout_as_customer(user_api_client, checkout):
    """A customer may not read private metadata even of their own checkout."""
    # given
    checkout.user = user_api_client.user
    checkout.save(update_fields=["user"])
    variables = {"token": checkout.pk}
    # when
    response = user_api_client.post_graphql(QUERY_CHECKOUT_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_checkout_as_staff(
    staff_api_client, checkout, customer_user, permission_manage_checkouts
):
    """Staff with manage-checkouts permission can read a checkout's private metadata."""
    # given
    checkout.user = customer_user
    checkout.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    checkout.save(update_fields=["user", "private_metadata"])
    variables = {"token": checkout.pk}
    # when
    response = staff_api_client.post_graphql(
        QUERY_CHECKOUT_PRIVATE_META,
        variables,
        [permission_manage_checkouts],
        check_no_permissions=False,  # Remove after fix #5245
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["checkout"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_checkout_as_app(
    app_api_client, checkout, customer_user, permission_manage_checkouts
):
    """An app with manage-checkouts permission can read a checkout's private metadata."""
    # given
    checkout.user = customer_user
    checkout.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    checkout.save(update_fields=["user", "private_metadata"])
    variables = {"token": checkout.pk}
    # when
    response = app_api_client.post_graphql(
        QUERY_CHECKOUT_PRIVATE_META,
        variables,
        [permission_manage_checkouts],
        check_no_permissions=False,  # Remove after fix #5245
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["checkout"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_ORDER_BY_TOKEN_PRIVATE_META = """
query orderMeta($token: UUID!){
orderByToken(token: $token){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_order_by_token_as_anonymous_user(api_client, order):
    """Anonymous users may not read private metadata via orderByToken."""
    # given
    variables = {"token": order.token}
    # when
    response = api_client.post_graphql(QUERY_ORDER_BY_TOKEN_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_order_by_token_as_customer(user_api_client, order):
    """A customer may not read private metadata even of their own order (by token)."""
    # given
    order.user = user_api_client.user
    order.save(update_fields=["user"])
    variables = {"token": order.token}
    # when
    response = user_api_client.post_graphql(
        QUERY_ORDER_BY_TOKEN_PRIVATE_META, variables
    )
    # then
    assert_no_permission(response)
def test_query_private_meta_for_order_by_token_as_staff(
    staff_api_client, order, customer_user, permission_manage_orders
):
    """Staff with manage-orders permission can read order private metadata by token."""
    # given
    order.user = customer_user
    order.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    order.save(update_fields=["user", "private_metadata"])
    variables = {"token": order.token}
    # when
    response = staff_api_client.post_graphql(
        QUERY_ORDER_BY_TOKEN_PRIVATE_META,
        variables,
        [permission_manage_orders],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["orderByToken"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_order_by_token_as_app(
    app_api_client, order, customer_user, permission_manage_orders
):
    """An app with manage-orders permission can read order private metadata by token."""
    # given
    order.user = customer_user
    order.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    order.save(update_fields=["user", "private_metadata"])
    variables = {"token": order.token}
    # when
    response = app_api_client.post_graphql(
        QUERY_ORDER_BY_TOKEN_PRIVATE_META,
        variables,
        [permission_manage_orders],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["orderByToken"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_ORDER_PRIVATE_META = """
query orderMeta($id: ID!){
order(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_order_as_anonymous_user(api_client, order):
    """Anonymous users may not read an order's private metadata."""
    # given
    order_id = graphene.Node.to_global_id("Order", order.pk)
    # when
    response = api_client.post_graphql(QUERY_ORDER_PRIVATE_META, {"id": order_id})
    # then
    assert_no_permission(response)
def test_query_private_meta_for_order_as_customer(user_api_client, order):
    """A customer may not read private metadata even of their own order."""
    # given
    order.user = user_api_client.user
    order.save(update_fields=["user"])
    variables = {"id": graphene.Node.to_global_id("Order", order.pk)}
    # when
    response = user_api_client.post_graphql(QUERY_ORDER_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_order_as_staff(
    staff_api_client, order, customer_user, permission_manage_orders
):
    """Staff with manage-orders permission can read an order's private metadata."""
    # given
    order.user = customer_user
    order.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    order.save(update_fields=["user", "private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Order", order.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_ORDER_PRIVATE_META,
        variables,
        [permission_manage_orders],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["order"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_order_as_app(
    app_api_client, order, customer_user, permission_manage_orders
):
    """An app with manage-orders permission can read an order's private metadata."""
    # given
    order.user = customer_user
    order.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    order.save(update_fields=["user", "private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Order", order.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_ORDER_PRIVATE_META,
        variables,
        [permission_manage_orders],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["order"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_DRAFT_ORDER_PRIVATE_META = """
query draftOrderMeta($id: ID!){
order(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_draft_order_as_anonymous_user(api_client, draft_order):
    """Anonymous users may not read a draft order's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("Order", draft_order.pk)}
    # when
    response = api_client.post_graphql(QUERY_DRAFT_ORDER_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_draft_order_as_customer(user_api_client, draft_order):
    """A customer may not read private metadata even of their own draft order."""
    # given
    draft_order.user = user_api_client.user
    draft_order.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    draft_order.save(update_fields=["user", "private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Order", draft_order.pk)}
    # when
    response = user_api_client.post_graphql(QUERY_DRAFT_ORDER_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_draft_order_as_staff(
    staff_api_client, draft_order, customer_user, permission_manage_orders
):
    """Staff with manage-orders permission can read a draft order's private metadata."""
    # given
    draft_order.user = customer_user
    draft_order.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    draft_order.save(update_fields=["user", "private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Order", draft_order.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_DRAFT_ORDER_PRIVATE_META,
        variables,
        [permission_manage_orders],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["order"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_draft_order_as_app(
    app_api_client, draft_order, customer_user, permission_manage_orders
):
    """An app with manage-orders permission can read a draft order's private metadata."""
    # given
    draft_order.user = customer_user
    draft_order.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    draft_order.save(update_fields=["user", "private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Order", draft_order.pk)}
    # when
    response = app_api_client.post_graphql(
        # Fixed copy-paste defect: this draft-order test previously posted
        # QUERY_ORDER_PRIVATE_META; use the draft-order query like the
        # sibling staff test does.
        QUERY_DRAFT_ORDER_PRIVATE_META,
        variables,
        [permission_manage_orders],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["order"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_FULFILLMENT_PRIVATE_META = """
query fulfillmentMeta($token: UUID!){
orderByToken(token: $token){
fulfillments{
privateMetadata{
key
value
}
}
}
}
"""
def test_query_private_meta_for_fulfillment_as_anonymous_user(
    api_client, fulfilled_order
):
    """Anonymous users may not read a fulfillment's private metadata."""
    # given
    variables = {"token": fulfilled_order.token}
    # when
    response = api_client.post_graphql(QUERY_FULFILLMENT_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_fulfillment_as_customer(
    user_api_client, fulfilled_order
):
    """A customer may not read private metadata of their own order's fulfillment."""
    # given
    fulfilled_order.user = user_api_client.user
    fulfilled_order.save(update_fields=["user"])
    variables = {"token": fulfilled_order.token}
    # when
    response = user_api_client.post_graphql(QUERY_FULFILLMENT_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_fulfillment_as_staff(
    staff_api_client, fulfilled_order, customer_user, permission_manage_orders
):
    """Staff with manage-orders permission can read a fulfillment's private metadata."""
    # given
    fulfillment = fulfilled_order.fulfillments.first()
    fulfillment.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    fulfillment.save(update_fields=["private_metadata"])
    fulfilled_order.user = customer_user
    fulfilled_order.save(update_fields=["user"])
    variables = {"token": fulfilled_order.token}
    # when
    response = staff_api_client.post_graphql(
        QUERY_FULFILLMENT_PRIVATE_META,
        variables,
        [permission_manage_orders],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["orderByToken"]["fulfillments"][0]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_fulfillment_as_app(
    app_api_client, fulfilled_order, customer_user, permission_manage_orders
):
    """An app with manage-orders permission can read a fulfillment's private metadata."""
    # given
    fulfillment = fulfilled_order.fulfillments.first()
    fulfillment.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    fulfillment.save(update_fields=["private_metadata"])
    fulfilled_order.user = customer_user
    fulfilled_order.save(update_fields=["user"])
    variables = {"token": fulfilled_order.token}
    # when
    response = app_api_client.post_graphql(
        QUERY_FULFILLMENT_PRIVATE_META,
        variables,
        [permission_manage_orders],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["orderByToken"]["fulfillments"][0]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_ATTRIBUTE_PRIVATE_META = """
query attributeMeta($id: ID!){
attribute(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_attribute_as_anonymous_user(
    api_client, color_attribute
):
    """Anonymous users may not read an attribute's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("Attribute", color_attribute.pk)}
    # when
    response = api_client.post_graphql(QUERY_ATTRIBUTE_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_attribute_as_customer(user_api_client, color_attribute):
    """A customer may not read an attribute's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("Attribute", color_attribute.pk)}
    # when
    response = user_api_client.post_graphql(QUERY_ATTRIBUTE_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_attribute_as_staff(
    staff_api_client, color_attribute, permission_manage_products
):
    """Staff with manage-products permission can read an attribute's private metadata."""
    # given
    color_attribute.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    color_attribute.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Attribute", color_attribute.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_ATTRIBUTE_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["attribute"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_attribute_as_app(
    app_api_client, color_attribute, permission_manage_products
):
    """An app with manage-products permission can read an attribute's private metadata."""
    # given
    color_attribute.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    color_attribute.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Attribute", color_attribute.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_ATTRIBUTE_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["attribute"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_CATEGORY_PRIVATE_META = """
query categoryMeta($id: ID!){
category(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_category_as_anonymous_user(api_client, category):
    """Anonymous users may not read a category's private metadata."""
    # given
    category_id = graphene.Node.to_global_id("Category", category.pk)
    # when
    response = api_client.post_graphql(
        QUERY_CATEGORY_PRIVATE_META, {"id": category_id}
    )
    # then
    assert_no_permission(response)
def test_query_private_meta_for_category_as_customer(user_api_client, category):
    """A customer may not read a category's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("Category", category.pk)}
    # when
    response = user_api_client.post_graphql(QUERY_CATEGORY_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_category_as_staff(
    staff_api_client, category, permission_manage_products
):
    """Staff with manage-products permission can read a category's private metadata."""
    # given
    category.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    category.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Category", category.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_CATEGORY_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["category"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_category_as_app(
    app_api_client, category, permission_manage_products
):
    """An app with manage-products permission can read a category's private metadata."""
    # given
    category.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    category.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Category", category.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_CATEGORY_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["category"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_COLLECTION_PRIVATE_META = """
query collectionMeta($id: ID!){
collection(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_collection_as_anonymous_user(api_client, collection):
    """Anonymous users may not read a collection's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("Collection", collection.pk)}
    # when
    response = api_client.post_graphql(QUERY_COLLECTION_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_collection_as_customer(user_api_client, collection):
    """A customer may not read a collection's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("Collection", collection.pk)}
    # when
    response = user_api_client.post_graphql(QUERY_COLLECTION_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_collection_as_staff(
    staff_api_client, collection, permission_manage_products
):
    """Staff with manage-products permission can read a collection's private metadata."""
    # given
    collection.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    collection.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Collection", collection.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_COLLECTION_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["collection"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_collection_as_app(
    app_api_client, collection, permission_manage_products
):
    """An app with manage-products permission can read a collection's private metadata."""
    # given
    collection.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    collection.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Collection", collection.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_COLLECTION_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["collection"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_DIGITAL_CONTENT_PRIVATE_META = """
query digitalContentMeta($id: ID!){
digitalContent(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_digital_content_as_anonymous_user(
    api_client, digital_content
):
    """Anonymous users may not read digital content's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("DigitalContent", digital_content.pk)}
    # when
    response = api_client.post_graphql(QUERY_DIGITAL_CONTENT_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_digital_content_as_customer(
    user_api_client, digital_content
):
    """A customer may not read digital content's private metadata."""
    # given
    digital_content.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    digital_content.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("DigitalContent", digital_content.pk)}
    # when
    response = user_api_client.post_graphql(
        QUERY_DIGITAL_CONTENT_PRIVATE_META, variables
    )
    # then
    assert_no_permission(response)
def test_query_private_meta_for_digital_content_as_staff(
    staff_api_client, digital_content, permission_manage_products
):
    """Staff with manage-products permission can read digital content's private metadata."""
    # given
    digital_content.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    digital_content.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("DigitalContent", digital_content.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_DIGITAL_CONTENT_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["digitalContent"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_digital_content_as_app(
    app_api_client, digital_content, permission_manage_products
):
    """An app with manage-products permission can read digital content's private metadata."""
    # given
    digital_content.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    digital_content.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("DigitalContent", digital_content.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_DIGITAL_CONTENT_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["digitalContent"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_PRODUCT_PRIVATE_META = """
query productsMeta($id: ID!){
product(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_product_as_anonymous_user(api_client, product):
    """Anonymous users may not read a product's private metadata."""
    # given
    product_id = graphene.Node.to_global_id("Product", product.pk)
    # when
    response = api_client.post_graphql(
        QUERY_PRODUCT_PRIVATE_META, {"id": product_id}
    )
    # then
    assert_no_permission(response)
def test_query_private_meta_for_product_as_customer(user_api_client, product):
    """A customer may not read a product's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("Product", product.pk)}
    # when
    response = user_api_client.post_graphql(QUERY_PRODUCT_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_product_as_staff(
    staff_api_client, product, permission_manage_products
):
    """Staff with manage-products permission can read a product's private metadata."""
    # given
    product.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    product.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Product", product.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_PRODUCT_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["product"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_product_as_app(
    app_api_client, product, permission_manage_products
):
    """An app with manage-products permission can read a product's private metadata."""
    # given
    product.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    product.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("Product", product.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_PRODUCT_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["product"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_PRODUCT_TYPE_PRIVATE_META = """
query productTypeMeta($id: ID!){
productType(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_product_type_as_anonymous_user(
    api_client, product_type
):
    """Anonymous users may not read a product type's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("ProductType", product_type.pk)}
    # when
    response = api_client.post_graphql(QUERY_PRODUCT_TYPE_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_product_type_as_customer(user_api_client, product_type):
    """A customer may not read a product type's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("ProductType", product_type.pk)}
    # when
    response = user_api_client.post_graphql(QUERY_PRODUCT_TYPE_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_product_type_as_staff(
    staff_api_client, product_type, permission_manage_products
):
    """Staff with manage-products permission can read a product type's private metadata."""
    # given
    product_type.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    product_type.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("ProductType", product_type.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_PRODUCT_TYPE_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["productType"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_product_type_as_app(
    app_api_client, product_type, permission_manage_products
):
    """An app with manage-products permission can read a product type's private metadata."""
    # given
    product_type.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    product_type.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("ProductType", product_type.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_PRODUCT_TYPE_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["productType"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_PRODUCT_VARIANT_PRIVATE_META = """
query productVariantMeta($id: ID!){
productVariant(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_product_variant_as_anonymous_user(api_client, variant):
    """Anonymous users may not read a variant's private metadata."""
    # given
    variant.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    variant.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("ProductVariant", variant.pk)}
    # when
    response = api_client.post_graphql(QUERY_PRODUCT_VARIANT_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_product_variant_as_customer(user_api_client, variant):
    """A customer may not read a variant's private metadata."""
    # given
    variant.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    variant.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("ProductVariant", variant.pk)}
    # when
    response = user_api_client.post_graphql(
        QUERY_PRODUCT_VARIANT_PRIVATE_META, variables
    )
    # then
    assert_no_permission(response)
def test_query_private_meta_for_product_variant_as_staff(
    staff_api_client, variant, permission_manage_products
):
    """Staff with manage-products permission can read a variant's private metadata."""
    # given
    variant.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    variant.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("ProductVariant", variant.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_PRODUCT_VARIANT_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["productVariant"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_product_variant_as_app(
    app_api_client, variant, permission_manage_products
):
    """An app with manage-products permission can read a variant's private metadata."""
    # given
    variant.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    variant.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("ProductVariant", variant.pk)}
    # when
    response = app_api_client.post_graphql(
        QUERY_PRODUCT_VARIANT_PRIVATE_META,
        variables,
        [permission_manage_products],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["productVariant"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
QUERY_APP_PRIVATE_META = """
query appMeta($id: ID!){
app(id: $id){
privateMetadata{
key
value
}
}
}
"""
def test_query_private_meta_for_app_as_anonymous_user(api_client, app):
    """Anonymous users may not read an app's private metadata."""
    # given
    app_id = graphene.Node.to_global_id("App", app.pk)
    # when
    response = api_client.post_graphql(QUERY_APP_PRIVATE_META, {"id": app_id})
    # then
    assert_no_permission(response)
def test_query_private_meta_for_app_as_customer(user_api_client, app):
    """A customer may not read an app's private metadata."""
    # given
    variables = {"id": graphene.Node.to_global_id("App", app.pk)}
    # when
    response = user_api_client.post_graphql(QUERY_APP_PRIVATE_META, variables)
    # then
    assert_no_permission(response)
def test_query_private_meta_for_app_as_staff(
    staff_api_client, app, permission_manage_apps
):
    """Staff with manage-apps permission can read an app's private metadata."""
    # given
    app.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    app.save(update_fields=["private_metadata"])
    variables = {"id": graphene.Node.to_global_id("App", app.pk)}
    # when
    response = staff_api_client.post_graphql(
        QUERY_APP_PRIVATE_META,
        variables,
        [permission_manage_apps],
        check_no_permissions=False,
    )
    content = get_graphql_content(response)
    # then
    metadata = content["data"]["app"]["privateMetadata"][0]
    assert metadata["key"] == PRIVATE_KEY
    assert metadata["value"] == PRIVATE_VALUE
def test_query_private_meta_for_app_as_app(app_api_client, app, permission_manage_apps):
    """An app with manage-apps may read another app's private metadata."""
    # given: an app with a single private metadata entry stored on it
    app.store_value_in_private_metadata({PRIVATE_KEY: PRIVATE_VALUE})
    app.save(update_fields=["private_metadata"])
    app_id = graphene.Node.to_global_id("App", app.pk)

    # when
    response = app_api_client.post_graphql(
        QUERY_APP_PRIVATE_META,
        {"id": app_id},
        [permission_manage_apps],
        check_no_permissions=False,
    )

    # then
    content = get_graphql_content(response)
    entry = content["data"]["app"]["privateMetadata"][0]
    assert (entry["key"], entry["value"]) == (PRIVATE_KEY, PRIVATE_VALUE)
| 29.485271
| 88
| 0.702545
| 8,957
| 76,072
| 5.546165
| 0.011611
| 0.045474
| 0.028746
| 0.04791
| 0.991767
| 0.988425
| 0.987177
| 0.986211
| 0.968335
| 0.959277
| 0
| 0.001645
| 0.19312
| 76,072
| 2,579
| 89
| 29.496704
| 0.807677
| 0.026173
| 0
| 0.77243
| 0
| 0
| 0.130991
| 0.001872
| 0
| 0
| 0
| 0
| 0.118241
| 1
| 0.070707
| false
| 0
| 0.001188
| 0
| 0.071895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
532be560cbc0cbfa605edb91cfe4b9ab86691b28
| 3,588
|
py
|
Python
|
behave_tests/steps/signup.py
|
Sindhuja-SRL/back-end
|
d84dae8ed212913339dec646b46a67fcc0b77f52
|
[
"MIT"
] | null | null | null |
behave_tests/steps/signup.py
|
Sindhuja-SRL/back-end
|
d84dae8ed212913339dec646b46a67fcc0b77f52
|
[
"MIT"
] | null | null | null |
behave_tests/steps/signup.py
|
Sindhuja-SRL/back-end
|
d84dae8ed212913339dec646b46a67fcc0b77f52
|
[
"MIT"
] | 1
|
2022-03-11T01:45:39.000Z
|
2022-03-11T01:45:39.000Z
|
from behave import *
import requests
from django.contrib.auth.models import User
# Match step texts below as regular expressions rather than parse patterns.
use_step_matcher("re")
@given("that I am a unregistered host of privilege walk events")
def step_impl(context):
context.username = "12thMan"
context.password = "SomePassword123"
context.first_name = "12th"
context.last_name = "Man"
context.email = "twelve@testtamu.edu"
@when("I make an API call to the sign up API with my details")
def step_impl(context):
data = {
"username": context.username,
"password": context.password,
"email" : context.email,
"first_name" : context.first_name,
"last_name" : context.last_name
}
resp = requests.post(context.test.live_server_url + "/auth/signup/", data)
assert resp.status_code >= 200 and resp.status_code < 300
context.api_response_data = resp.json()
@then("I expect the response to tell me that I have signed up successfully")
def step_impl(context):
assert context.api_response_data["created"] == "success"
@given("that I am someone who wants to signup with used emailid")
def step_impl(context):
context.username = "12thMan"
context.password = "SomePassword123"
context.first_name = "12th"
context.last_name = "Man"
context.email = "twelve@testtamu.edu"
usr = User.objects.create_user(
context.username,
context.email,
context.password
)
usr.first_name = context.first_name
usr.last_name = context.last_name
usr.save()
registered_user = User.objects.filter(username="12thMan")
assert len(registered_user) == 1
@when("I make an API call to the sign up API with a used email id")
def step_impl(context):
data = {
"username": context.username,
"password": context.password,
"email" : context.email,
"first_name" : context.first_name,
"last_name" : context.last_name
}
resp = requests.post(context.test.live_server_url + "/auth/signup/", data)
assert resp.status_code >= 200 and resp.status_code < 300
context.api_response_data = resp.json()
@then("I expect the response to tell me that the sign up is not successful due to email id is already used")
def step_impl(context):
assert context.api_response_data["created"] == "email exists"
@given("that I am someone who wants to signup with used username")
def step_impl(context):
context.username = "12thMan"
context.password = "SomePassword123"
context.first_name = "12th"
context.last_name = "Man"
context.email = "different@testtamu.edu"
usr = User.objects.create_user(
context.username,
"twelve@testtamu.edu",
context.password
)
usr.first_name = context.first_name
usr.last_name = context.last_name
usr.save()
registered_user = User.objects.filter(username="12thMan")
assert len(registered_user) == 1
@when("I make an API call to the sign up API with a used username")
def step_impl(context):
data = {
"username": context.username,
"password": context.password,
"email" : context.email,
"first_name" : context.first_name,
"last_name" : context.last_name
}
resp = requests.post(context.test.live_server_url + "/auth/signup/", data)
assert resp.status_code >= 200 and resp.status_code < 300
context.api_response_data = resp.json()
@then("I expect the response to tell me that the sign up is not successful due to username is already used")
def step_impl(context):
assert context.api_response_data["created"] == "username exists"
| 29.409836
| 108
| 0.683668
| 485
| 3,588
| 4.919588
| 0.202062
| 0.049036
| 0.041492
| 0.067896
| 0.888516
| 0.888516
| 0.881811
| 0.881811
| 0.881811
| 0.84451
| 0
| 0.015839
| 0.208194
| 3,588
| 122
| 109
| 29.409836
| 0.824006
| 0
| 0
| 0.733333
| 0
| 0.011111
| 0.277236
| 0.00613
| 0
| 0
| 0
| 0
| 0.088889
| 1
| 0.1
| false
| 0.088889
| 0.033333
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
5337f81ede8a3b5604a1e4054c51c1f6e9738a7a
| 16,745
|
py
|
Python
|
ta/tests/volatility.py
|
kurtbaet/ta
|
f776673049fd314361fbc714900afaf248ab87b4
|
[
"MIT"
] | null | null | null |
ta/tests/volatility.py
|
kurtbaet/ta
|
f776673049fd314361fbc714900afaf248ab87b4
|
[
"MIT"
] | null | null | null |
ta/tests/volatility.py
|
kurtbaet/ta
|
f776673049fd314361fbc714900afaf248ab87b4
|
[
"MIT"
] | null | null | null |
import unittest
import pandas as pd
from ta.volatility import (AverageTrueRange, BollingerBands, DonchianChannel,
KeltnerChannel, UlcerIndex, average_true_range,
bollinger_hband, bollinger_hband_indicator,
bollinger_lband, bollinger_lband_indicator,
bollinger_mavg, bollinger_pband, bollinger_wband,
donchian_channel_hband, donchian_channel_lband,
donchian_channel_mband, donchian_channel_pband,
donchian_channel_wband, keltner_channel_hband,
keltner_channel_hband_indicator,
keltner_channel_lband,
keltner_channel_lband_indicator,
keltner_channel_mband, keltner_channel_pband,
keltner_channel_wband, ulcer_index)
class TestAverageTrueRange(unittest.TestCase):
    """ATR (n=14) regression test against a StockCharts reference sheet.

    https://school.stockcharts.com/doku.php?id=technical_indicators:average_true_range_atr
    https://docs.google.com/spreadsheets/d/1DYG5NI_1px30aZ6oJkDIkWsyJW5V8jGbBVKIr9NWtec/edit?usp=sharing
    """

    _filename = 'ta/tests/data/cs-atr.csv'

    @classmethod
    def setUpClass(cls):
        # Load the reference OHLC data once for the whole class.
        cls._df = pd.read_csv(cls._filename, sep=',')
        cls._params = dict(
            high=cls._df['High'], low=cls._df['Low'], close=cls._df['Close'],
            n=14, fillna=False)
        cls._indicator = AverageTrueRange(**cls._params)

    @classmethod
    def tearDownClass(cls):
        del cls._df  # `del` is a statement, not a function call

    def _assert_tail_equal(self, target, result):
        """Compare the reference column's tail against the computed series."""
        pd.testing.assert_series_equal(
            self._df[target].tail(), result.tail(), check_names=False)

    def test_atr(self):
        # Functional API.
        self._assert_tail_equal('ATR', average_true_range(**self._params))

    def test_atr2(self):
        # Class-based API.
        self._assert_tail_equal('ATR', self._indicator.average_true_range())
class TestAverageTrueRange2(unittest.TestCase):
    """ATR (n=10) regression test against a StockCharts reference sheet.

    https://school.stockcharts.com/doku.php?id=technical_indicators:average_true_range_atr
    https://docs.google.com/spreadsheets/d/1IRlmwVmRLAzjIIt2iXBukZyyaSAYB_0iRyAoOowZaBk/edit?usp=sharing
    """

    _filename = 'ta/tests/data/cs-atr2.csv'

    @classmethod
    def setUpClass(cls):
        # Load the reference OHLC data once for the whole class.
        cls._df = pd.read_csv(cls._filename, sep=',')
        cls._params = dict(
            high=cls._df['High'], low=cls._df['Low'], close=cls._df['Close'],
            n=10, fillna=False)
        cls._indicator = AverageTrueRange(**cls._params)

    @classmethod
    def tearDownClass(cls):
        del cls._df  # `del` is a statement, not a function call

    def _assert_tail_equal(self, target, result):
        """Compare the reference column's tail against the computed series."""
        pd.testing.assert_series_equal(
            self._df[target].tail(), result.tail(), check_names=False)

    def test_atr(self):
        # Class-based API.
        self._assert_tail_equal('ATR', self._indicator.average_true_range())

    def test_atr2(self):
        # Functional API.
        self._assert_tail_equal('ATR', average_true_range(**self._params))
class TestBollingerBands(unittest.TestCase):
    """Bollinger Bands regression test against a StockCharts reference sheet.

    https://school.stockcharts.com/doku.php?id=technical_indicators:bollinger_bands
    """

    _filename = 'ta/tests/data/cs-bbands.csv'

    @classmethod
    def setUpClass(cls):
        # Load the reference data once for the whole class.
        cls._df = pd.read_csv(cls._filename, sep=',')
        cls._params = dict(close=cls._df['Close'], n=20, ndev=2, fillna=False)
        cls._indicator = BollingerBands(**cls._params)

    @classmethod
    def tearDownClass(cls):
        del cls._df  # `del` is a statement, not a function call

    def _assert_tail_equal(self, target, result):
        """Compare the reference column's tail against the computed series."""
        pd.testing.assert_series_equal(
            self._df[target].tail(), result.tail(), check_names=False)

    # --- class-based API ---

    def test_mavg(self):
        self._assert_tail_equal('MiddleBand', self._indicator.bollinger_mavg())

    def test_hband(self):
        self._assert_tail_equal('HighBand', self._indicator.bollinger_hband())

    def test_lband(self):
        self._assert_tail_equal('LowBand', self._indicator.bollinger_lband())

    def test_wband(self):
        self._assert_tail_equal('WidthBand', self._indicator.bollinger_wband())

    def test_pband(self):
        self._assert_tail_equal(
            'PercentageBand', self._indicator.bollinger_pband())

    def test_hband_indicator(self):
        self._assert_tail_equal(
            'CrossUp', self._indicator.bollinger_hband_indicator())

    def test_lband_indicator(self):
        self._assert_tail_equal(
            'CrossDown', self._indicator.bollinger_lband_indicator())

    # --- functional API ---

    def test_mavg2(self):
        # bollinger_mavg takes no `ndev`, so it cannot use **self._params.
        self._assert_tail_equal(
            'MiddleBand',
            bollinger_mavg(close=self._df['Close'], n=20, fillna=False))

    def test_hband2(self):
        self._assert_tail_equal('HighBand', bollinger_hband(**self._params))

    def test_lband2(self):
        self._assert_tail_equal('LowBand', bollinger_lband(**self._params))

    def test_wband2(self):
        self._assert_tail_equal('WidthBand', bollinger_wband(**self._params))

    def test_pband2(self):
        self._assert_tail_equal(
            'PercentageBand', bollinger_pband(**self._params))

    def test_hband_indicator2(self):
        self._assert_tail_equal(
            'CrossUp', bollinger_hband_indicator(**self._params))

    def test_lband_indicator2(self):
        self._assert_tail_equal(
            'CrossDown', bollinger_lband_indicator(**self._params))
class TestDonchianChannel(unittest.TestCase):
    """Donchian Channel (offset=0) regression test.

    https://www.investopedia.com/terms/d/donchianchannels.asp
    https://docs.google.com/spreadsheets/d/17JWWsxSiAb24BLzncUpccc8hg-03QjVWVXmoRCJ2lME/edit#gid=0
    """

    _filename = 'ta/tests/data/cs-dc.csv'

    @classmethod
    def setUpClass(cls):
        # Load the reference data once for the whole class.
        cls._df = pd.read_csv(cls._filename, sep=',')
        cls._params = dict(
            high=cls._df['high'], low=cls._df['low'], close=cls._df['close'],
            n=20, offset=0, fillna=False)
        cls._indicator = DonchianChannel(**cls._params)

    @classmethod
    def tearDownClass(cls):
        del cls._df  # `del` is a statement, not a function call

    def _assert_tail_equal(self, target, result):
        """Compare the reference column's tail against the computed series."""
        pd.testing.assert_series_equal(
            self._df[target].tail(), result.tail(), check_names=False)

    # --- class-based API ---

    def test_mavg(self):
        self._assert_tail_equal(
            'middle_band', self._indicator.donchian_channel_mband())

    def test_hband(self):
        self._assert_tail_equal(
            'upper_band', self._indicator.donchian_channel_hband())

    def test_lband(self):
        self._assert_tail_equal(
            'lower_band', self._indicator.donchian_channel_lband())

    def test_wband(self):
        self._assert_tail_equal(
            'dc_band_width', self._indicator.donchian_channel_wband())

    def test_pband(self):
        self._assert_tail_equal(
            'dc_percentage', self._indicator.donchian_channel_pband())

    # --- functional API ---

    def test_mavg2(self):
        self._assert_tail_equal(
            'middle_band', donchian_channel_mband(**self._params))

    def test_hband2(self):
        self._assert_tail_equal(
            'upper_band', donchian_channel_hband(**self._params))

    def test_lband2(self):
        self._assert_tail_equal(
            'lower_band', donchian_channel_lband(**self._params))

    def test_wband2(self):
        self._assert_tail_equal(
            'dc_band_width', donchian_channel_wband(**self._params))

    def test_pband2(self):
        self._assert_tail_equal(
            'dc_percentage', donchian_channel_pband(**self._params))
class TestDonchianChannel2(unittest.TestCase):
    """Donchian Channel (offset=1) regression test.

    https://www.investopedia.com/terms/d/donchianchannels.asp
    https://docs.google.com/spreadsheets/d/17JWWsxSiAb24BLzncUpccc8hg-03QjVWVXmoRCJ2lME/edit#gid=0
    """

    _filename = 'ta/tests/data/cs-dc2.csv'

    @classmethod
    def setUpClass(cls):
        # Load the reference data once for the whole class.
        cls._df = pd.read_csv(cls._filename, sep=',')
        cls._params = dict(
            high=cls._df['high'], low=cls._df['low'], close=cls._df['close'],
            n=20, offset=1, fillna=False)
        cls._indicator = DonchianChannel(**cls._params)

    @classmethod
    def tearDownClass(cls):
        del cls._df  # `del` is a statement, not a function call

    def _assert_tail_equal(self, target, result):
        """Compare the reference column's tail against the computed series."""
        pd.testing.assert_series_equal(
            self._df[target].tail(), result.tail(), check_names=False)

    # --- class-based API ---

    def test_mavg(self):
        self._assert_tail_equal(
            'middle_band', self._indicator.donchian_channel_mband())

    def test_hband(self):
        self._assert_tail_equal(
            'upper_band', self._indicator.donchian_channel_hband())

    def test_lband(self):
        self._assert_tail_equal(
            'lower_band', self._indicator.donchian_channel_lband())

    def test_wband(self):
        self._assert_tail_equal(
            'dc_band_width', self._indicator.donchian_channel_wband())

    def test_pband(self):
        self._assert_tail_equal(
            'dc_percentage', self._indicator.donchian_channel_pband())

    # --- functional API ---

    def test_mavg2(self):
        self._assert_tail_equal(
            'middle_band', donchian_channel_mband(**self._params))

    def test_hband2(self):
        self._assert_tail_equal(
            'upper_band', donchian_channel_hband(**self._params))

    def test_lband2(self):
        self._assert_tail_equal(
            'lower_band', donchian_channel_lband(**self._params))

    def test_wband2(self):
        self._assert_tail_equal(
            'dc_band_width', donchian_channel_wband(**self._params))

    def test_pband2(self):
        self._assert_tail_equal(
            'dc_percentage', donchian_channel_pband(**self._params))
class TestKeltnerChannel(unittest.TestCase):
    """Keltner Channel regression test against a StockCharts reference sheet.

    https://school.stockcharts.com/doku.php?id=technical_indicators:keltner_channels
    https://docs.google.com/spreadsheets/d/1qT8JbJ7F13bMV9-TcK-oFHL1F5sKPwakQWf6KrvGI3U/edit?usp=sharing
    """

    _filename = 'ta/tests/data/cs-kc.csv'

    @classmethod
    def setUpClass(cls):
        # Load the reference data once for the whole class.
        cls._df = pd.read_csv(cls._filename, sep=',')
        cls._params = dict(
            high=cls._df['High'], low=cls._df['Low'], close=cls._df['Close'],
            n=20, n_atr=10, fillna=False, ov=False)
        cls._indicator = KeltnerChannel(**cls._params)

    @classmethod
    def tearDownClass(cls):
        del cls._df  # `del` is a statement, not a function call

    def _assert_tail_equal(self, target, result):
        """Compare the reference column's tail against the computed series."""
        pd.testing.assert_series_equal(
            self._df[target].tail(), result.tail(), check_names=False)

    # --- class-based API ---

    def test_mavg(self):
        self._assert_tail_equal(
            'middle_band', self._indicator.keltner_channel_mband())

    def test_hband(self):
        self._assert_tail_equal(
            'upper_band', self._indicator.keltner_channel_hband())

    def test_lband(self):
        self._assert_tail_equal(
            'lower_band', self._indicator.keltner_channel_lband())

    def test_wband(self):
        self._assert_tail_equal(
            'kc_band_width', self._indicator.keltner_channel_wband())

    def test_pband(self):
        self._assert_tail_equal(
            'kc_percentage', self._indicator.keltner_channel_pband())

    def test_hband_indicator(self):
        self._assert_tail_equal(
            'kc_high_indicator', self._indicator.keltner_channel_hband_indicator())

    def test_lband_indicator(self):
        self._assert_tail_equal(
            'kc_low_indicator', self._indicator.keltner_channel_lband_indicator())

    # --- functional API ---

    def test_mavg2(self):
        self._assert_tail_equal(
            'middle_band', keltner_channel_mband(**self._params))

    def test_hband2(self):
        self._assert_tail_equal(
            'upper_band', keltner_channel_hband(**self._params))

    def test_lband2(self):
        self._assert_tail_equal(
            'lower_band', keltner_channel_lband(**self._params))

    def test_wband2(self):
        self._assert_tail_equal(
            'kc_band_width', keltner_channel_wband(**self._params))

    def test_pband2(self):
        self._assert_tail_equal(
            'kc_percentage', keltner_channel_pband(**self._params))

    def test_hband_indicator2(self):
        self._assert_tail_equal(
            'kc_high_indicator', keltner_channel_hband_indicator(**self._params))

    def test_lband_indicator2(self):
        self._assert_tail_equal(
            'kc_low_indicator', keltner_channel_lband_indicator(**self._params))
class TestUlcerIndex(unittest.TestCase):
    """Ulcer Index regression test against a StockCharts reference sheet.

    https://stockcharts.com/school/doku.php?id=chart_school:technical_indicators:ulcer_index
    https://docs.google.com/spreadsheets/d/1PpiRxv4Cnjqod9zNTnls4Lfn8lknHFnWk1DmaTZgZC8/edit#gid=0
    """

    _filename = 'ta/tests/data/cs-ui.csv'

    @classmethod
    def setUpClass(cls):
        # Load the reference data once for the whole class.
        cls._df = pd.read_csv(cls._filename, sep=',')
        cls._params = dict(close=cls._df['Close'], n=14, fillna=False)
        cls._indicator = UlcerIndex(**cls._params)

    @classmethod
    def tearDownClass(cls):
        del cls._df  # `del` is a statement, not a function call

    def _assert_last_equal(self, target, result):
        """Compare only the final value of the reference column and result."""
        pd.testing.assert_series_equal(
            self._df[target].tail(1), result.tail(1), check_names=False)

    def test_ulcer_index(self):
        # Class-based API.
        self._assert_last_equal('ulcer_index', self._indicator.ulcer_index())

    def test_ulcer_index2(self):
        # Functional API.
        self._assert_last_equal('ulcer_index', ulcer_index(**self._params))
| 39.586288
| 117
| 0.674828
| 2,052
| 16,745
| 5.210039
| 0.069201
| 0.030867
| 0.075765
| 0.106071
| 0.856047
| 0.830979
| 0.807127
| 0.807127
| 0.794594
| 0.794594
| 0
| 0.007723
| 0.19582
| 16,745
| 422
| 118
| 39.680095
| 0.786202
| 0.067304
| 0
| 0.766447
| 0
| 0
| 0.053009
| 0.010912
| 0
| 0
| 0
| 0
| 0.177632
| 1
| 0.223684
| false
| 0
| 0.009868
| 0
| 0.279605
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5352d1552be54c069025267ec82c76044ca6e568
| 3,496
|
py
|
Python
|
wlnupdates/wrapper.py
|
AlphaNeo513/wlnupdates-api.py
|
3fc77900fbb722b1d51c424721b9849e91106fd8
|
[
"MIT"
] | null | null | null |
wlnupdates/wrapper.py
|
AlphaNeo513/wlnupdates-api.py
|
3fc77900fbb722b1d51c424721b9849e91106fd8
|
[
"MIT"
] | null | null | null |
wlnupdates/wrapper.py
|
AlphaNeo513/wlnupdates-api.py
|
3fc77900fbb722b1d51c424721b9849e91106fd8
|
[
"MIT"
] | null | null | null |
import requests
import json
from pprint import pprint
class Wrapper:
    """Thin client for the wlnupdates.com JSON API.

    Every public ``get_*`` method POSTs a ``{"id": ..., "mode": ...}``
    payload to the single API endpoint and returns the decoded JSON on
    success, or a human-readable error string on failure.
    """

    def __init__(self):
        self.headers = {'Content-Type': 'application/json'}
        self.url = "https://www.wlnupdates.com/api" #API link

    def _call(self, id, mode):
        """POST one request and normalise the response.

        :param id: numeric identifier of the queried entity
        :param mode: API mode string selecting the entity type
        :returns: decoded JSON dict, or an error string when the id is
            unknown or the API flags an error
        """
        payload = {"id": id, "mode": mode}
        response = requests.request(
            "POST", self.url, headers=self.headers, data=json.dumps(payload))
        try:
            json_data = response.json()
        except ValueError:  # non-JSON body: the id does not exist
            return f"ID: {id} does not exist"
        if json_data['error'] is True:
            return 'Error! '+json_data['message']
        return json_data

    def get_series_data(self, id):
        """Fetch a series by id."""
        return self._call(id, "get-series-id")

    def get_watches(self, id):
        """Fetch a tag (watch) by id."""
        return self._call(id, "get-tag-id")

    def get_publisher_data(self, id):
        """Fetch a publisher by id."""
        return self._call(id, "get-publisher-id")

    def get_group_data(self, id):
        """Fetch a translation group by id."""
        return self._call(id, "get-group-id")

    def get_artist_data(self, id):
        """Fetch an artist by id."""
        return self._call(id, "get-artist-id")

    def get_author_data(self, id):
        """Fetch an author by id."""
        return self._call(id, "get-author-id")

    def get_genre_data(self, id):
        """Fetch a genre by id."""
        return self._call(id, "get-genre-id")
| 34.613861
| 101
| 0.555492
| 419
| 3,496
| 4.527446
| 0.121718
| 0.118081
| 0.04797
| 0.055351
| 0.876647
| 0.876647
| 0.876647
| 0.863996
| 0.775435
| 0.775435
| 0
| 0
| 0.316934
| 3,496
| 100
| 102
| 34.96
| 0.794389
| 0.002288
| 0
| 0.769231
| 0
| 0
| 0.146544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.087912
| false
| 0
| 0.032967
| 0
| 0.362637
| 0.010989
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
535988fa15e5f7bc9ecc67b4cbe744a58c2036de
| 4,181
|
py
|
Python
|
tests/test_coroutines.py
|
MVilstrup/parallely
|
555b7b0a0a222232fe5016525b50e5b5632b97b5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_coroutines.py
|
MVilstrup/parallely
|
555b7b0a0a222232fe5016525b50e5b5632b97b5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_coroutines.py
|
MVilstrup/parallely
|
555b7b0a0a222232fe5016525b50e5b5632b97b5
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from parallely import asynced
def convert(func):
    """Wrap *func* with parallely's ``asynced`` decorator (shared test shim)."""
    # PEP 8 (E731): use `def` instead of assigning a lambda to a name.
    return asynced(func)
def test_empty(single_arg, multi_kwarg):
    """Calling converted functions with no arguments raises TypeError."""
    wrapped_single = convert(single_arg)
    wrapped_multi = convert(multi_kwarg)
    with pytest.raises(TypeError):
        assert wrapped_single()
    with pytest.raises(TypeError):
        wrapped_multi()
def test_empty_async(single_arg_async, multi_kwarg_async):
    """Same as test_empty, but for coroutine-based fixtures."""
    wrapped_single = convert(single_arg_async)
    wrapped_multi = convert(multi_kwarg_async)
    with pytest.raises(TypeError):
        assert wrapped_single()
    with pytest.raises(TypeError):
        wrapped_multi()
def test_normal_call(single_arg, multi_kwarg):
    """Converted functions still work as plain synchronous calls."""
    single_arg, multi_kwarg = convert(single_arg), convert(multi_kwarg)
    # positional and keyword spellings are interchangeable
    assert single_arg(1) == single_arg(a=1) == 1
    assert multi_kwarg(1, 1) == 2
    assert multi_kwarg(a=1, b=1) == multi_kwarg(1, b=1) == 2
def test_normal_call_async(single_arg_async, multi_kwarg_async):
    """Same as test_normal_call, but for coroutine-based fixtures."""
    single_arg, multi_kwarg = convert(single_arg_async), convert(multi_kwarg_async)
    # positional and keyword spellings are interchangeable
    assert single_arg(1) == single_arg(a=1) == 1
    assert multi_kwarg(1, 1) == 2
    assert multi_kwarg(a=1, b=1) == multi_kwarg(1, b=1) == 2
def test_iterators_only(single_arg, multi_kwarg):
    """map() over equal-length iterables applies the function element-wise."""
    single_arg, multi_kwarg = convert(single_arg), convert(multi_kwarg)
    identity = [1, 1]
    summed = [2, 2]
    assert single_arg.map([1, 1]) == identity
    assert single_arg.map(a=[1, 1]) == identity
    assert multi_kwarg.map([1, 1], [1, 1]) == summed
    assert multi_kwarg.map(a=[1, 1], b=[1, 1]) == summed
    assert multi_kwarg.map([1, 1], b=[1, 1]) == summed
def test_iterators_only_async(single_arg_async, multi_kwarg_async):
    """Same as test_iterators_only, but for coroutine-based fixtures."""
    single_arg, multi_kwarg = convert(single_arg_async), convert(multi_kwarg_async)
    identity = [1, 1]
    summed = [2, 2]
    assert single_arg.map([1, 1]) == identity
    assert single_arg.map(a=[1, 1]) == identity
    assert multi_kwarg.map([1, 1], [1, 1]) == summed
    assert multi_kwarg.map(a=[1, 1], b=[1, 1]) == summed
    assert multi_kwarg.map([1, 1], b=[1, 1]) == summed
def test_iterators_and_constants(multi_kwarg):
    """Scalar arguments broadcast against iterable arguments in map()."""
    fn = convert(multi_kwarg)
    expected = [2, 2]
    assert fn.map([1, 1], 1) == expected
    assert fn.map(1, [1, 1]) == expected
    assert fn.map(a=1, b=[1, 1]) == expected
    assert fn.map(a=[1, 1], b=1) == expected
    assert fn.map(1, b=[1, 1]) == expected
    assert fn.map([1, 1], b=1) == expected
def test_iterators_and_constants_async(multi_kwarg_async):
    """Same as test_iterators_and_constants, but for a coroutine fixture."""
    fn = convert(multi_kwarg_async)
    expected = [2, 2]
    assert fn.map([1, 1], 1) == expected
    assert fn.map(1, [1, 1]) == expected
    assert fn.map(a=1, b=[1, 1]) == expected
    assert fn.map(a=[1, 1], b=1) == expected
    assert fn.map(1, b=[1, 1]) == expected
    assert fn.map([1, 1], b=1) == expected
def test_uneven_iterators(multi_kwarg):
    """map() truncates to the shortest of the supplied iterables."""
    fn = convert(multi_kwarg)
    assert fn.map([1, 1], [1]) == [2]
    assert fn.map(a=[1, 1], b=[1]) == [2]
    assert fn.map([1], b=[1, 1]) == [2]
def test_uneven_iterators_async(multi_kwarg_async):
    """Same as test_uneven_iterators, but for a coroutine fixture."""
    fn = convert(multi_kwarg_async)
    assert fn.map([1, 1], [1]) == [2]
    assert fn.map(a=[1, 1], b=[1]) == [2]
    assert fn.map([1], b=[1, 1]) == [2]
def test_empty_iterators(multi_kwarg):
    """An empty iterable argument is rejected with ValueError."""
    fn = convert(multi_kwarg)
    calls = (
        lambda: fn.map([1, 1], []),
        lambda: fn.map(a=[1, 1], b=[]),
        lambda: fn.map([], b=[1, 1]),
    )
    for call in calls:
        with pytest.raises(ValueError):
            assert call() == [2]
def test_empty_iterators_async(multi_kwarg_async):
    """Same as test_empty_iterators, but for a coroutine fixture."""
    fn = convert(multi_kwarg_async)
    calls = (
        lambda: fn.map([1, 1], []),
        lambda: fn.map(a=[1, 1], b=[]),
        lambda: fn.map([], b=[1, 1]),
    )
    for call in calls:
        with pytest.raises(ValueError):
            assert call() == [2]
def test_iterators_dicts():
    """A dict-valued keyword is passed through whole, not iterated over."""
    @convert
    def probe(numeric, dict_like):
        assert isinstance(dict_like, dict)
        return numeric

    assert probe.map([1, 1], dict_like={"one": 1}) == [1, 1]
| 29.652482
| 83
| 0.639321
| 673
| 4,181
| 3.741456
| 0.059435
| 0.293884
| 0.228753
| 0.22637
| 0.917792
| 0.902701
| 0.902701
| 0.902701
| 0.863781
| 0.863781
| 0
| 0.054799
| 0.192538
| 4,181
| 140
| 84
| 29.864286
| 0.691055
| 0
| 0
| 0.769231
| 0
| 0
| 0.000718
| 0
| 0
| 0
| 0
| 0
| 0.527473
| 1
| 0.153846
| false
| 0
| 0.021978
| 0
| 0.186813
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
5368ee67d566caebcf124e3689a2cc5643adb9af
| 7,825
|
py
|
Python
|
morphomatics_med/stats/ExponentialBarycenter.py
|
MATHplus-Young-Academy/P3-Morph-Scoring
|
0e2ba66cf28e30525b22706cc50d23b9de09a58a
|
[
"Apache-2.0"
] | 1
|
2022-03-16T20:15:16.000Z
|
2022-03-16T20:15:16.000Z
|
morphomatics_med/stats/ExponentialBarycenter.py
|
MATHplus-Young-Academy/P3-Morph-Scoring
|
0e2ba66cf28e30525b22706cc50d23b9de09a58a
|
[
"Apache-2.0"
] | null | null | null |
morphomatics_med/stats/ExponentialBarycenter.py
|
MATHplus-Young-Academy/P3-Morph-Scoring
|
0e2ba66cf28e30525b22706cc50d23b9de09a58a
|
[
"Apache-2.0"
] | null | null | null |
################################################################################
# #
# This file is part of the Morphomatics library #
# see https://github.com/morphomatics/morphomatics #
# #
# Copyright (C) 2021 Zuse Institute Berlin #
# #
# Morphomatics is distributed under the terms of the ZIB Academic License. #
# see $MORPHOMATICS/LICENSE #
# #
################################################################################
import numpy as np
from joblib import Parallel, delayed
from joblib import parallel_backend
from ..manifold import Manifold
#from pymanopt.solvers import SteepestDescent
#from pymanopt import Problem
class ExponentialBarycenter(object):
    """
    Exponential barycenter, see e.g.
    Pennec and Arsigny (2012): Exponential Barycenters of the Canonical Cartan Connection and Invariant Means on Lie Groups.
    The barycenter will be a bi-invariant notion of mean in the Lie group setting and the Frechét mean for Riemamnnian manifolds.
    (For the special case of a bi-invariant metric, both notions will agree.)
    """

    @staticmethod
    def compute(mfd: "Manifold", data, x=None, max_iter=100, n_jobs=-1, alpha=1):
        """
        :arg mfd: data space in which mean is computed
        :arg data: list of data points
        :arg x: initial guess
        :arg max_iter: maximal number of fixed point iterations
        :arg n_jobs: number of parallel jobs for gradient evaluation
        :arg alpha: step size of the fixed point iteration
        :returns: mean of data, i.e. exp. barycenter thereof
        """
        assert mfd.connec

        # initial guess
        if x is None:
            x = data[0].copy()  # TODO: better guess -> choose most central sample

        # Newton-type fixed point iteration
        with Parallel(n_jobs=n_jobs, prefer='threads', verbose=0) as parallel:
            # unit-speed log towards each sample; samples coinciding with the
            # current iterate (distance 0) are skipped to avoid division by 0
            normed_log = lambda a: mfd.connec.log(a[0], a[1]) / mfd.metric.dist(a[0], a[1])
            grad = lambda a: np.sum(parallel(delayed(normed_log)((a, b))
                                             for b in data if mfd.metric.dist(a, b) > 0), axis=0)
            grad_denom = lambda a: 1 / np.sum([mfd.metric.dist(a, b) for b in data
                                               if mfd.metric.dist(a, b) > 0])
            # BUG FIX: iteration count was hard-coded to 100, ignoring the
            # max_iter parameter; leftover debug prints were removed as well.
            for _ in range(max_iter):
                g = grad(x) * grad_denom(x)
                if mfd.metric:
                    g_norm = mfd.metric.norm(x, -g)
                else:
                    g_norm = np.linalg.norm(-g)
                if g_norm < 1e-12:
                    break
                x = mfd.connec.exp(x, -alpha * g)
        return x

    @staticmethod
    def total_variance(mfd: "Manifold", data, x=None):
        """
        :arg mfd: data space in which mean is computed
        :arg data: samples
        :arg x: center (computed via :meth:`compute` when omitted)
        :returns: total variance, i.e. mean squared distance to the center
        """
        assert mfd.connec and mfd.metric
        if x is None:
            x = ExponentialBarycenter.compute(mfd, data)
        return np.sum([mfd.metric.dist(x, y) ** 2 for y in data]) / len(data)
def median_grad(mfd: Manifold, data, x):
list_log = [mfd.connec.log(x,b) for b in data]
list_normedlog = [mfd.connec.norm(x,v) for v in list_log]################################################################################
# #
# This file is part of the Morphomatics library #
# see https://github.com/morphomatics/morphomatics #
# #
# Copyright (C) 2021 Zuse Institute Berlin #
# #
# Morphomatics is distributed under the terms of the ZIB Academic License. #
# see $MORPHOMATICS/LICENSE #
# #
################################################################################
import numpy as np
from joblib import Parallel, delayed
from joblib import parallel_backend
from ..manifold import Manifold
#from pymanopt.solvers import SteepestDescent
#from pymanopt import Problem
class ExponentialBarycenter(object):
    """
    Exponential barycenter, see e.g.
    Pennec and Arsigny (2012): Exponential Barycenters of the Canonical Cartan
    Connection and Invariant Means on Lie Groups.

    The barycenter is a bi-invariant notion of mean in the Lie group setting
    and the Fréchet mean for Riemannian manifolds.
    (For the special case of a bi-invariant metric, both notions agree.)

    NOTE(review): this variant iterates on the direction returned by
    ``median_grad`` and thus computes a Weiszfeld-type geometric median
    rather than the Fréchet mean — confirm intended semantics.
    """

    @staticmethod
    def compute(mfd: Manifold, data, x=None, max_iter=100, n_jobs=-1, alpha=1):
        """
        Iteratively estimate the barycenter of the given samples.

        :arg mfd: data space in which mean is computed
        :arg data: list of data points
        :arg x: initial guess (defaults to a copy of the first sample)
        :arg max_iter: maximal number of fixed-point iterations
        :arg n_jobs: kept for interface compatibility; unused in this variant
        :arg alpha: step size of the update
        :returns: mean of data, i.e. exp. barycenter thereof
        """
        assert mfd.connec

        # initial guess
        if x is None:
            x = data[0].copy()  # TODO: better guess -> choose most central sample

        # Fixed-point iteration: step towards the Weiszfeld-type update
        # direction until it (numerically) vanishes.  The previously opened
        # joblib Parallel context was never used and has been removed.
        for _ in range(max_iter):  # honor the max_iter argument (was hard-coded to 100)
            g = median_grad(mfd, data, x)
            # use the Riemannian norm when a metric is available
            if mfd.metric:
                g_norm = mfd.metric.norm(x, -g)
            else:
                g_norm = np.linalg.norm(-g)
            print(f'|grad|={g_norm}')
            if g_norm < 1e-12:
                break
            x = mfd.connec.exp(x, alpha * g)

        return x

    @staticmethod
    def total_variance(mfd: Manifold, data, x=None):
        """
        :arg mfd: data space in which mean is computed
        :arg data: samples
        :arg x: center (computed via :meth:`compute` if omitted)
        :returns: total variance
        """
        assert mfd.connec and mfd.metric
        if x is None:
            x = ExponentialBarycenter.compute(mfd, data)
        return np.sum([mfd.metric.dist(x, y) ** 2 for y in data]) / len(data)
def median_grad(mfd: "Manifold", data, x):
    """
    Weiszfeld-type update direction for the geometric median of *data* at *x*.

    :arg mfd: data space; must provide ``connec.log`` and ``connec.norm``
    :arg data: list of sample points
    :arg x: current estimate of the median
    :returns: weighted mean of the unit tangent vectors pointing from x to the
        samples, i.e. ``sum_i(v_i/|v_i|) / sum_i(1/|v_i|)`` with ``v_i = log_x(b_i)``
    """
    # Tangent vectors from x towards each sample and their lengths.
    # NOTE(review): the sibling implementation above measures lengths via
    # mfd.metric -- confirm that mfd.connec.norm is the intended accessor.
    logs = np.array([mfd.connec.log(x, b) for b in data])
    lengths = np.array([mfd.connec.norm(x, v) for v in logs])

    # Drop samples coinciding with x: their direction is undefined (0/0).
    mask = lengths != 0
    logs = logs[mask]
    # expand so the per-sample length broadcasts over the vector components
    lengths = np.expand_dims(lengths[mask], axis=-1)

    grad = np.sum(np.divide(logs, lengths), axis=0)
    grad_denom = np.sum(np.reciprocal(lengths), axis=0)
    return np.divide(grad, grad_denom)
| 40.968586
| 145
| 0.514505
| 938
| 7,825
| 4.232409
| 0.179104
| 0.031738
| 0.026196
| 0.020151
| 0.909824
| 0.898741
| 0.898741
| 0.878589
| 0.878589
| 0.878589
| 0
| 0.013476
| 0.355144
| 7,825
| 190
| 146
| 41.184211
| 0.773286
| 0.467987
| 0
| 0.702703
| 0
| 0
| 0.014968
| 0
| 0
| 0
| 0
| 0.010526
| 0.054054
| 1
| 0.081081
| false
| 0
| 0.108108
| 0
| 0.283784
| 0.040541
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7254496f48f76f869cf581fbac5d97a081c21e31
| 29,404
|
py
|
Python
|
src/plugins/dhcp/test/test_dhcp6.py
|
amithbraj/vpp
|
edf1da94dc099c6e2ab1d455ce8652fada3cdb04
|
[
"Apache-2.0"
] | 52
|
2016-09-20T15:08:46.000Z
|
2020-12-22T23:03:25.000Z
|
src/plugins/dhcp/test/test_dhcp6.py
|
amithbraj/vpp
|
edf1da94dc099c6e2ab1d455ce8652fada3cdb04
|
[
"Apache-2.0"
] | 63
|
2018-06-11T09:48:35.000Z
|
2021-01-05T09:11:03.000Z
|
src/plugins/dhcp/test/test_dhcp6.py
|
amithbraj/vpp
|
edf1da94dc099c6e2ab1d455ce8652fada3cdb04
|
[
"Apache-2.0"
] | 36
|
2016-07-21T11:20:33.000Z
|
2022-01-16T15:55:45.000Z
|
from socket import AF_INET6, inet_ntop, inet_pton
from scapy.layers.dhcp6 import DHCP6_Advertise, DHCP6OptClientId, \
DHCP6OptStatusCode, DHCP6OptPref, DHCP6OptIA_PD, DHCP6OptIAPrefix, \
DHCP6OptServerId, DHCP6_Solicit, DHCP6_Reply, DHCP6_Request, DHCP6_Renew, \
DHCP6_Rebind, DUID_LL, DHCP6_Release, DHCP6OptElapsedTime, DHCP6OptIA_NA, \
DHCP6OptIAAddress
from scapy.layers.inet6 import IPv6, Ether, UDP
from scapy.utils6 import in6_mactoifaceid
from framework import VppTestCase
from vpp_papi import VppEnum
import util
import os
def ip6_normalize(ip6):
    """Return the IPv6 address string *ip6* in canonical textual form."""
    packed = inet_pton(AF_INET6, ip6)
    return inet_ntop(AF_INET6, packed)
class TestDHCPv6DataPlane(VppTestCase):
    """ DHCPv6 Data Plane Test Case """

    @classmethod
    def setUpClass(cls):
        super(TestDHCPv6DataPlane, cls).setUpClass()

    @classmethod
    def tearDownClass(cls):
        super(TestDHCPv6DataPlane, cls).tearDownClass()

    def setUp(self):
        super(TestDHCPv6DataPlane, self).setUp()

        # single packet-generator interface with IPv6 configured
        self.create_pg_interfaces(range(1))
        self.interfaces = list(self.pg_interfaces)
        for i in self.interfaces:
            i.admin_up()
            i.config_ip6()
        # DUID of the emulated server, derived from pg0's peer MAC
        self.server_duid = DUID_LL(lladdr=self.pg0.remote_mac)

    def tearDown(self):
        for i in self.interfaces:
            i.unconfig_ip6()
            i.admin_down()
        super(TestDHCPv6DataPlane, self).tearDown()

    def test_dhcp_ia_na_send_solicit_receive_advertise(self):
        """ Verify DHCPv6 IA NA Solicit packet and Advertise event """

        self.vapi.dhcp6_clients_enable_disable(enable=1)

        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()

        # ask VPP to solicit this address with the given lifetimes
        address = {'address': '1:2:3::5',
                   'preferred_time': 60,
                   'valid_time': 120}
        self.vapi.dhcp6_send_client_message(
            server_index=0xffffffff,
            mrc=1,
            msg_type=VppEnum.vl_api_dhcpv6_msg_type_t.DHCPV6_MSG_API_SOLICIT,
            sw_if_index=self.pg0.sw_if_index,
            T1=20,
            T2=40,
            addresses=[address],
            n_addresses=len(
                [address]))

        rx_list = self.pg0.get_capture(1)
        self.assertEqual(len(rx_list), 1)
        packet = rx_list[0]

        self.assertEqual(packet.haslayer(IPv6), 1)
        self.assertEqual(packet[IPv6].haslayer(DHCP6_Solicit), 1)

        client_duid = packet[DHCP6OptClientId].duid
        trid = packet[DHCP6_Solicit].trid  # transaction id to echo back

        # Solicit must go to the All_DHCP_Relay_Agents_and_Servers multicast
        # address from the interface's link-local address
        dst = ip6_normalize(packet[IPv6].dst)
        dst2 = ip6_normalize("ff02::1:2")
        self.assert_equal(dst, dst2)
        src = ip6_normalize(packet[IPv6].src)
        src2 = ip6_normalize(self.pg0.local_ip6_ll)
        self.assert_equal(src, src2)

        # the IA_NA option must carry the requested address and lifetimes
        ia_na = packet[DHCP6OptIA_NA]
        self.assert_equal(ia_na.T1, 20)
        self.assert_equal(ia_na.T2, 40)
        self.assert_equal(len(ia_na.ianaopts), 1)
        address = ia_na.ianaopts[0]
        self.assert_equal(address.addr, '1:2:3::5')
        self.assert_equal(address.preflft, 60)
        self.assert_equal(address.validlft, 120)

        self.vapi.want_dhcp6_reply_events(enable_disable=1,
                                          pid=os.getpid())
        try:
            # craft a matching Advertise from the emulated server
            # (UDP 547 -> 546: server port to client port)
            ia_na_opts = DHCP6OptIAAddress(addr='7:8::2', preflft=60,
                                           validlft=120)
            p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
                 IPv6(src=util.mk_ll_addr(self.pg0.remote_mac),
                      dst=self.pg0.local_ip6_ll) /
                 UDP(sport=547, dport=546) /
                 DHCP6_Advertise(trid=trid) /
                 DHCP6OptServerId(duid=self.server_duid) /
                 DHCP6OptClientId(duid=client_duid) /
                 DHCP6OptPref(prefval=7) /
                 DHCP6OptStatusCode(statuscode=1) /
                 DHCP6OptIA_NA(iaid=1, T1=20, T2=40, ianaopts=ia_na_opts)
                 )
            self.pg0.add_stream([p])
            self.pg_start()

            # VPP should report the Advertise content back via the API event
            ev = self.vapi.wait_for_event(1, "dhcp6_reply_event")
            self.assert_equal(ev.preference, 7)
            self.assert_equal(ev.status_code, 1)
            self.assert_equal(ev.T1, 20)
            self.assert_equal(ev.T2, 40)

            reported_address = ev.addresses[0]
            address = ia_na_opts.getfieldval("addr")
            self.assert_equal(str(reported_address.address), address)
            self.assert_equal(reported_address.preferred_time,
                              ia_na_opts.getfieldval("preflft"))
            self.assert_equal(reported_address.valid_time,
                              ia_na_opts.getfieldval("validlft"))
        finally:
            self.vapi.want_dhcp6_reply_events(enable_disable=0)
            self.vapi.dhcp6_clients_enable_disable(enable=0)

    def test_dhcp_pd_send_solicit_receive_advertise(self):
        """ Verify DHCPv6 PD Solicit packet and Advertise event """

        self.vapi.dhcp6_clients_enable_disable(enable=1)

        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()

        # ask VPP to solicit this prefix delegation
        prefix = {'prefix': {'address': '1:2:3::', 'len': 50},
                  'preferred_time': 60,
                  'valid_time': 120}
        prefixes = [prefix]
        self.vapi.dhcp6_pd_send_client_message(
            server_index=0xffffffff,
            mrc=1,
            msg_type=VppEnum.vl_api_dhcpv6_msg_type_t.DHCPV6_MSG_API_SOLICIT,
            sw_if_index=self.pg0.sw_if_index,
            T1=20,
            T2=40,
            prefixes=prefixes,
            n_prefixes=len(prefixes))

        rx_list = self.pg0.get_capture(1)
        self.assertEqual(len(rx_list), 1)
        packet = rx_list[0]

        self.assertEqual(packet.haslayer(IPv6), 1)
        self.assertEqual(packet[IPv6].haslayer(DHCP6_Solicit), 1)

        client_duid = packet[DHCP6OptClientId].duid
        trid = packet[DHCP6_Solicit].trid  # transaction id to echo back

        # Solicit must be multicast from the link-local source, as above
        dst = ip6_normalize(packet[IPv6].dst)
        dst2 = ip6_normalize("ff02::1:2")
        self.assert_equal(dst, dst2)
        src = ip6_normalize(packet[IPv6].src)
        src2 = ip6_normalize(self.pg0.local_ip6_ll)
        self.assert_equal(src, src2)

        # the IA_PD option must carry the requested prefix and lifetimes
        ia_pd = packet[DHCP6OptIA_PD]
        self.assert_equal(ia_pd.T1, 20)
        self.assert_equal(ia_pd.T2, 40)
        self.assert_equal(len(ia_pd.iapdopt), 1)
        prefix = ia_pd.iapdopt[0]
        self.assert_equal(prefix.prefix, '1:2:3::')
        self.assert_equal(prefix.plen, 50)
        self.assert_equal(prefix.preflft, 60)
        self.assert_equal(prefix.validlft, 120)

        self.vapi.want_dhcp6_pd_reply_events(enable_disable=1,
                                             pid=os.getpid())
        try:
            # craft a matching Advertise from the emulated server
            ia_pd_opts = DHCP6OptIAPrefix(prefix='7:8::', plen=56, preflft=60,
                                          validlft=120)
            p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
                 IPv6(src=util.mk_ll_addr(self.pg0.remote_mac),
                      dst=self.pg0.local_ip6_ll) /
                 UDP(sport=547, dport=546) /
                 DHCP6_Advertise(trid=trid) /
                 DHCP6OptServerId(duid=self.server_duid) /
                 DHCP6OptClientId(duid=client_duid) /
                 DHCP6OptPref(prefval=7) /
                 DHCP6OptStatusCode(statuscode=1) /
                 DHCP6OptIA_PD(iaid=1, T1=20, T2=40, iapdopt=ia_pd_opts)
                 )
            self.pg0.add_stream([p])
            self.pg_start()

            # VPP should report the Advertise content back via the API event
            ev = self.vapi.wait_for_event(1, "dhcp6_pd_reply_event")
            self.assert_equal(ev.preference, 7)
            self.assert_equal(ev.status_code, 1)
            self.assert_equal(ev.T1, 20)
            self.assert_equal(ev.T2, 40)

            reported_prefix = ev.prefixes[0]
            prefix = ia_pd_opts.getfieldval("prefix")
            self.assert_equal(
                str(reported_prefix.prefix).split('/')[0], prefix)
            self.assert_equal(int(str(reported_prefix.prefix).split('/')[1]),
                              ia_pd_opts.getfieldval("plen"))
            self.assert_equal(reported_prefix.preferred_time,
                              ia_pd_opts.getfieldval("preflft"))
            self.assert_equal(reported_prefix.valid_time,
                              ia_pd_opts.getfieldval("validlft"))
        finally:
            self.vapi.want_dhcp6_pd_reply_events(enable_disable=0)
            self.vapi.dhcp6_clients_enable_disable(enable=0)
class TestDHCPv6IANAControlPlane(VppTestCase):
    """ DHCPv6 IA NA Control Plane Test Case """

    @classmethod
    def setUpClass(cls):
        super(TestDHCPv6IANAControlPlane, cls).setUpClass()

    @classmethod
    def tearDownClass(cls):
        super(TestDHCPv6IANAControlPlane, cls).tearDownClass()

    def setUp(self):
        super(TestDHCPv6IANAControlPlane, self).setUp()

        self.create_pg_interfaces(range(1))
        self.interfaces = list(self.pg_interfaces)
        for i in self.interfaces:
            i.admin_up()

        # DUID of the emulated server; the client DUID is learned from the
        # first Solicit VPP sends (see validate_packet)
        self.server_duid = DUID_LL(lladdr=self.pg0.remote_mac)
        self.client_duid = None
        # short renew/rebind timers so the timeout tests run quickly
        self.T1 = 1
        self.T2 = 2

        # snapshot the FIB so tests can diff for addresses added by DHCPv6
        fib = self.vapi.ip_route_dump(0, True)
        self.initial_addresses = set(self.get_interface_addresses(fib,
                                                                  self.pg0))

        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()

        # enabling the client immediately triggers a Solicit
        self.vapi.dhcp6_client_enable_disable(sw_if_index=self.pg0.sw_if_index,
                                              enable=1)

    def tearDown(self):
        self.vapi.dhcp6_client_enable_disable(sw_if_index=self.pg0.sw_if_index,
                                              enable=0)

        for i in self.interfaces:
            i.admin_down()

        super(TestDHCPv6IANAControlPlane, self).tearDown()

    @staticmethod
    def get_interface_addresses(fib, pg):
        # collect host (/128) routes attached to the given interface
        lst = []
        for entry in fib:
            if entry.route.prefix.prefixlen == 128:
                path = entry.route.paths[0]
                if path.sw_if_index == pg.sw_if_index:
                    lst.append(str(entry.route.prefix.network_address))
        return lst

    def get_addresses(self):
        # addresses added on pg0 since setUp's snapshot
        fib = self.vapi.ip_route_dump(0, True)
        addresses = set(self.get_interface_addresses(fib, self.pg0))
        return addresses.difference(self.initial_addresses)

    def validate_duid_ll(self, duid):
        # raises if duid is not a well-formed DUID-LL
        DUID_LL(duid)

    def validate_packet(self, packet, msg_type, is_resend=False):
        try:
            self.assertEqual(packet.haslayer(msg_type), 1)
            client_duid = packet[DHCP6OptClientId].duid
            if self.client_duid is None:
                # first client message: learn and validate the client DUID
                self.client_duid = client_duid
                self.validate_duid_ll(client_duid)
            else:
                # the client DUID must stay constant across messages
                self.assertEqual(self.client_duid, client_duid)
            if msg_type != DHCP6_Solicit and msg_type != DHCP6_Rebind:
                # Solicit/Rebind carry no server id; all others must echo ours
                server_duid = packet[DHCP6OptServerId].duid
                self.assertEqual(server_duid, self.server_duid)
            if is_resend:
                # a retransmission reuses the same transaction id
                self.assertEqual(self.trid, packet[msg_type].trid)
            else:
                self.trid = packet[msg_type].trid
            ip = packet[IPv6]
            udp = packet[UDP]
            # client multicasts from port 546 to server port 547
            self.assertEqual(ip.dst, 'ff02::1:2')
            self.assertEqual(udp.sport, 546)
            self.assertEqual(udp.dport, 547)
            dhcpv6 = packet[msg_type]
            elapsed_time = dhcpv6[DHCP6OptElapsedTime]
            # elapsed time is 0 on the first transmission, non-zero on resends
            if (is_resend):
                self.assertNotEqual(elapsed_time.elapsedtime, 0)
            else:
                self.assertEqual(elapsed_time.elapsedtime, 0)
        except BaseException:
            packet.show()
            raise

    def wait_for_packet(self, msg_type, timeout=None, is_resend=False):
        if timeout is None:
            timeout = 3
        rx_list = self.pg0.get_capture(1, timeout=timeout)
        packet = rx_list[0]
        self.validate_packet(packet, msg_type, is_resend=is_resend)

    def wait_for_solicit(self, timeout=None, is_resend=False):
        self.wait_for_packet(DHCP6_Solicit, timeout, is_resend=is_resend)

    def wait_for_request(self, timeout=None, is_resend=False):
        self.wait_for_packet(DHCP6_Request, timeout, is_resend=is_resend)

    def wait_for_renew(self, timeout=None, is_resend=False):
        self.wait_for_packet(DHCP6_Renew, timeout, is_resend=is_resend)

    def wait_for_rebind(self, timeout=None, is_resend=False):
        self.wait_for_packet(DHCP6_Rebind, timeout, is_resend=is_resend)

    def wait_for_release(self, timeout=None, is_resend=False):
        self.wait_for_packet(DHCP6_Release, timeout, is_resend=is_resend)

    def send_packet(self, msg_type, t1=None, t2=None, ianaopts=None):
        """Send a server->client message of msg_type with an IA_NA option."""
        if t1 is None:
            t1 = self.T1
        if t2 is None:
            t2 = self.T2
        if ianaopts is None:
            opt_ia_na = DHCP6OptIA_NA(iaid=1, T1=t1, T2=t2)
        else:
            opt_ia_na = DHCP6OptIA_NA(iaid=1, T1=t1, T2=t2, ianaopts=ianaopts)
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IPv6(src=util.mk_ll_addr(self.pg0.remote_mac),
                  dst=self.pg0.local_ip6_ll) /
             UDP(sport=547, dport=546) /
             msg_type(trid=self.trid) /
             DHCP6OptServerId(duid=self.server_duid) /
             DHCP6OptClientId(duid=self.client_duid) /
             opt_ia_na
             )
        self.pg0.add_stream([p])
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()

    def send_advertise(self, t1=None, t2=None, ianaopts=None):
        self.send_packet(DHCP6_Advertise, t1, t2, ianaopts)

    def send_reply(self, t1=None, t2=None, ianaopts=None):
        self.send_packet(DHCP6_Reply, t1, t2, ianaopts)

    def test_T1_and_T2_timeouts(self):
        """ Test T1 and T2 timeouts """
        # full handshake, then verify Renew fires after T1 and Rebind after T2
        self.wait_for_solicit()
        self.send_advertise()
        self.wait_for_request()
        self.send_reply()

        self.sleep(1)

        self.wait_for_renew()

        self.pg_enable_capture(self.pg_interfaces)

        self.sleep(1)

        self.wait_for_rebind()

    def test_addresses(self):
        """ Test handling of addresses """
        # lease an address with short lifetimes and watch it appear/expire
        ia_na_opts = DHCP6OptIAAddress(addr='7:8::2', preflft=1,
                                       validlft=2)

        self.wait_for_solicit()
        self.send_advertise(t1=20, t2=40, ianaopts=ia_na_opts)
        self.wait_for_request()
        self.send_reply(t1=20, t2=40, ianaopts=ia_na_opts)
        self.sleep(0.1)

        # check FIB for new address
        new_addresses = self.get_addresses()
        self.assertEqual(len(new_addresses), 1)
        addr = list(new_addresses)[0]
        self.assertEqual(addr, '7:8::2')

        self.sleep(2)

        # check that the address is deleted after valid_time expires
        fib = self.vapi.ip_route_dump(0, True)
        addresses = set(self.get_interface_addresses(fib, self.pg0))
        new_addresses = addresses.difference(self.initial_addresses)
        self.assertEqual(len(new_addresses), 0)

    def test_sending_client_messages_solicit(self):
        """ VPP receives messages from DHCPv6 client """
        # reflected client-role messages must not confuse VPP's client;
        # it should simply resend its Solicit
        self.wait_for_solicit()
        self.send_packet(DHCP6_Solicit)
        self.send_packet(DHCP6_Request)
        self.send_packet(DHCP6_Renew)
        self.send_packet(DHCP6_Rebind)
        self.sleep(1)
        self.wait_for_solicit(is_resend=True)

    def test_sending_inappropriate_packets(self):
        """ Server sends messages with inappropriate message types """
        # out-of-state server messages must be ignored (the client resends)
        self.wait_for_solicit()
        self.send_reply()
        self.wait_for_solicit(is_resend=True)
        self.send_advertise()
        self.wait_for_request()
        self.send_advertise()
        self.wait_for_request(is_resend=True)
        self.send_reply()
        self.wait_for_renew()

    def test_no_address_available_in_advertise(self):
        """ Advertise message contains NoAddrsAvail status code """
        self.wait_for_solicit()
        noavail = DHCP6OptStatusCode(statuscode=2)  # NoAddrsAvail
        self.send_advertise(ianaopts=noavail)
        # client must keep soliciting rather than request
        self.wait_for_solicit(is_resend=True)

    def test_preferred_greater_than_valid_lifetime(self):
        """ Preferred lifetime is greater than valid lifetime """
        # invalid lease per RFC 8415: preferred must not exceed valid
        self.wait_for_solicit()
        self.send_advertise()
        self.wait_for_request()
        ia_na_opts = DHCP6OptIAAddress(addr='7:8::2', preflft=4, validlft=3)
        self.send_reply(ianaopts=ia_na_opts)

        self.sleep(0.5)

        # check FIB contains no addresses
        fib = self.vapi.ip_route_dump(0, True)
        addresses = set(self.get_interface_addresses(fib, self.pg0))
        new_addresses = addresses.difference(self.initial_addresses)
        self.assertEqual(len(new_addresses), 0)

    def test_T1_greater_than_T2(self):
        """ T1 is greater than T2 """
        # invalid timers: T1 must not exceed T2, so the lease is discarded
        self.wait_for_solicit()
        self.send_advertise()
        self.wait_for_request()
        ia_na_opts = DHCP6OptIAAddress(addr='7:8::2', preflft=4, validlft=8)
        self.send_reply(t1=80, t2=40, ianaopts=ia_na_opts)

        self.sleep(0.5)

        # check FIB contains no addresses
        fib = self.vapi.ip_route_dump(0, True)
        addresses = set(self.get_interface_addresses(fib, self.pg0))
        new_addresses = addresses.difference(self.initial_addresses)
        self.assertEqual(len(new_addresses), 0)
class TestDHCPv6PDControlPlane(VppTestCase):
    """ DHCPv6 PD Control Plane Test Case """

    @classmethod
    def setUpClass(cls):
        super(TestDHCPv6PDControlPlane, cls).setUpClass()

    @classmethod
    def tearDownClass(cls):
        super(TestDHCPv6PDControlPlane, cls).tearDownClass()

    def setUp(self):
        super(TestDHCPv6PDControlPlane, self).setUp()

        # pg0 runs the DHCPv6-PD client; pg1 receives addresses derived from
        # the delegated prefix via the prefix group
        self.create_pg_interfaces(range(2))
        self.interfaces = list(self.pg_interfaces)
        for i in self.interfaces:
            i.admin_up()

        # DUID of the emulated server; the client DUID is learned from the
        # first Solicit VPP sends (see validate_packet)
        self.server_duid = DUID_LL(lladdr=self.pg0.remote_mac)
        self.client_duid = None
        # short renew/rebind timers so the timeout tests run quickly
        self.T1 = 1
        self.T2 = 2

        # snapshot the FIB so tests can diff for addresses added by PD
        fib = self.vapi.ip_route_dump(0, True)
        self.initial_addresses = set(self.get_interface_addresses(fib,
                                                                  self.pg1))

        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()

        # enabling the PD client immediately triggers a Solicit
        self.prefix_group = 'my-pd-prefix-group'
        self.vapi.dhcp6_pd_client_enable_disable(
            enable=1,
            sw_if_index=self.pg0.sw_if_index,
            prefix_group=self.prefix_group)

    def tearDown(self):
        self.vapi.dhcp6_pd_client_enable_disable(self.pg0.sw_if_index,
                                                 enable=0)

        for i in self.interfaces:
            i.admin_down()

        super(TestDHCPv6PDControlPlane, self).tearDown()

    @staticmethod
    def get_interface_addresses(fib, pg):
        # collect host (/128) routes attached to the given interface
        lst = []
        for entry in fib:
            if entry.route.prefix.prefixlen == 128:
                path = entry.route.paths[0]
                if path.sw_if_index == pg.sw_if_index:
                    lst.append(str(entry.route.prefix.network_address))
        return lst

    def get_addresses(self):
        # addresses added on pg1 since setUp's snapshot
        fib = self.vapi.ip_route_dump(0, True)
        addresses = set(self.get_interface_addresses(fib, self.pg1))
        return addresses.difference(self.initial_addresses)

    def validate_duid_ll(self, duid):
        # raises if duid is not a well-formed DUID-LL
        DUID_LL(duid)

    def validate_packet(self, packet, msg_type, is_resend=False):
        try:
            self.assertEqual(packet.haslayer(msg_type), 1)
            client_duid = packet[DHCP6OptClientId].duid
            if self.client_duid is None:
                # first client message: learn and validate the client DUID
                self.client_duid = client_duid
                self.validate_duid_ll(client_duid)
            else:
                # the client DUID must stay constant across messages
                self.assertEqual(self.client_duid, client_duid)
            if msg_type != DHCP6_Solicit and msg_type != DHCP6_Rebind:
                # Solicit/Rebind carry no server id; all others must echo ours
                server_duid = packet[DHCP6OptServerId].duid
                self.assertEqual(server_duid, self.server_duid)
            if is_resend:
                # a retransmission reuses the same transaction id
                self.assertEqual(self.trid, packet[msg_type].trid)
            else:
                self.trid = packet[msg_type].trid
            ip = packet[IPv6]
            udp = packet[UDP]
            # client multicasts from port 546 to server port 547
            self.assertEqual(ip.dst, 'ff02::1:2')
            self.assertEqual(udp.sport, 546)
            self.assertEqual(udp.dport, 547)
            dhcpv6 = packet[msg_type]
            elapsed_time = dhcpv6[DHCP6OptElapsedTime]
            # elapsed time is 0 on the first transmission, non-zero on resends
            if (is_resend):
                self.assertNotEqual(elapsed_time.elapsedtime, 0)
            else:
                self.assertEqual(elapsed_time.elapsedtime, 0)
        except BaseException:
            packet.show()
            raise

    def wait_for_packet(self, msg_type, timeout=None, is_resend=False):
        if timeout is None:
            timeout = 3
        rx_list = self.pg0.get_capture(1, timeout=timeout)
        packet = rx_list[0]
        self.validate_packet(packet, msg_type, is_resend=is_resend)

    def wait_for_solicit(self, timeout=None, is_resend=False):
        self.wait_for_packet(DHCP6_Solicit, timeout, is_resend=is_resend)

    def wait_for_request(self, timeout=None, is_resend=False):
        self.wait_for_packet(DHCP6_Request, timeout, is_resend=is_resend)

    def wait_for_renew(self, timeout=None, is_resend=False):
        self.wait_for_packet(DHCP6_Renew, timeout, is_resend=is_resend)

    def wait_for_rebind(self, timeout=None, is_resend=False):
        self.wait_for_packet(DHCP6_Rebind, timeout, is_resend=is_resend)

    def wait_for_release(self, timeout=None, is_resend=False):
        self.wait_for_packet(DHCP6_Release, timeout, is_resend=is_resend)

    def send_packet(self, msg_type, t1=None, t2=None, iapdopt=None):
        """Send a server->client message of msg_type with an IA_PD option."""
        if t1 is None:
            t1 = self.T1
        if t2 is None:
            t2 = self.T2
        if iapdopt is None:
            opt_ia_pd = DHCP6OptIA_PD(iaid=1, T1=t1, T2=t2)
        else:
            opt_ia_pd = DHCP6OptIA_PD(iaid=1, T1=t1, T2=t2, iapdopt=iapdopt)
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IPv6(src=util.mk_ll_addr(self.pg0.remote_mac),
                  dst=self.pg0.local_ip6_ll) /
             UDP(sport=547, dport=546) /
             msg_type(trid=self.trid) /
             DHCP6OptServerId(duid=self.server_duid) /
             DHCP6OptClientId(duid=self.client_duid) /
             opt_ia_pd
             )
        self.pg0.add_stream([p])
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()

    def send_advertise(self, t1=None, t2=None, iapdopt=None):
        self.send_packet(DHCP6_Advertise, t1, t2, iapdopt)

    def send_reply(self, t1=None, t2=None, iapdopt=None):
        self.send_packet(DHCP6_Reply, t1, t2, iapdopt)

    def test_T1_and_T2_timeouts(self):
        """ Test T1 and T2 timeouts """
        # full handshake, then verify Renew fires after T1 and Rebind after T2
        self.wait_for_solicit()
        self.send_advertise()
        self.wait_for_request()
        self.send_reply()

        self.sleep(1)

        self.wait_for_renew()

        self.pg_enable_capture(self.pg_interfaces)

        self.sleep(1)

        self.wait_for_rebind()

    def test_prefixes(self):
        """ Test handling of prefixes """
        # interface-id suffixes that get combined with the delegated prefix
        address1 = '::2:0:0:0:405/60'
        address2 = '::76:0:0:0:406/62'

        try:
            self.vapi.ip6_add_del_address_using_prefix(
                sw_if_index=self.pg1.sw_if_index,
                address_with_prefix=address1,
                prefix_group=self.prefix_group)

            # delegate 7:8::/56 with short lifetimes
            ia_pd_opts = DHCP6OptIAPrefix(prefix='7:8::', plen=56, preflft=2,
                                          validlft=3)

            self.wait_for_solicit()
            self.send_advertise(t1=20, t2=40, iapdopt=ia_pd_opts)
            self.wait_for_request()
            self.send_reply(t1=20, t2=40, iapdopt=ia_pd_opts)

            self.sleep(0.1)

            # check FIB for new address (prefix + suffix combined)
            new_addresses = self.get_addresses()
            self.assertEqual(len(new_addresses), 1)
            addr = list(new_addresses)[0]
            self.assertEqual(addr, '7:8:0:2::405')

            self.sleep(1)

            # a suffix registered after the lease must also get an address
            self.vapi.ip6_add_del_address_using_prefix(
                sw_if_index=self.pg1.sw_if_index,
                address_with_prefix=address2,
                prefix_group=self.prefix_group)

            self.sleep(1)

            # check FIB contains 2 addresses
            fib = self.vapi.ip_route_dump(0, True)
            addresses = set(self.get_interface_addresses(fib, self.pg1))
            new_addresses = addresses.difference(self.initial_addresses)
            self.assertEqual(len(new_addresses), 2)
            addr1 = list(new_addresses)[0]
            addr2 = list(new_addresses)[1]
            if addr1 == '7:8:0:76::406':
                addr1, addr2 = addr2, addr1  # set order is unspecified
            self.assertEqual(addr1, '7:8:0:2::405')
            self.assertEqual(addr2, '7:8:0:76::406')

            self.sleep(1)

            # check that the addresses are deleted after valid_time expires
            fib = self.vapi.ip_route_dump(0, True)
            addresses = set(self.get_interface_addresses(fib, self.pg1))
            new_addresses = addresses.difference(self.initial_addresses)
            self.assertEqual(len(new_addresses), 0)
        finally:
            if address1 is not None:
                self.vapi.ip6_add_del_address_using_prefix(
                    sw_if_index=self.pg1.sw_if_index,
                    address_with_prefix=address1,
                    prefix_group=self.prefix_group, is_add=0)
            if address2 is not None:
                self.vapi.ip6_add_del_address_using_prefix(
                    sw_if_index=self.pg1.sw_if_index,
                    address_with_prefix=address2,
                    prefix_group=self.prefix_group, is_add=0)

    def test_sending_client_messages_solicit(self):
        """ VPP receives messages from DHCPv6 client """
        # reflected client-role messages must not confuse VPP's client;
        # it should simply resend its Solicit
        self.wait_for_solicit()
        self.send_packet(DHCP6_Solicit)
        self.send_packet(DHCP6_Request)
        self.send_packet(DHCP6_Renew)
        self.send_packet(DHCP6_Rebind)
        self.sleep(1)
        self.wait_for_solicit(is_resend=True)

    def test_sending_inappropriate_packets(self):
        """ Server sends messages with inappropriate message types """
        # out-of-state server messages must be ignored (the client resends)
        self.wait_for_solicit()
        self.send_reply()
        self.wait_for_solicit(is_resend=True)
        self.send_advertise()
        self.wait_for_request()
        self.send_advertise()
        self.wait_for_request(is_resend=True)
        self.send_reply()
        self.wait_for_renew()

    def test_no_prefix_available_in_advertise(self):
        """ Advertise message contains NoPrefixAvail status code """
        self.wait_for_solicit()
        noavail = DHCP6OptStatusCode(statuscode=6)  # NoPrefixAvail
        self.send_advertise(iapdopt=noavail)
        # client must keep soliciting rather than request
        self.wait_for_solicit(is_resend=True)

    def test_preferred_greater_than_valid_lifetime(self):
        """ Preferred lifetime is greater than valid lifetime """
        # invalid lease per RFC 8415: preferred must not exceed valid
        address1 = '::2:0:0:0:405/60'

        try:
            self.vapi.ip6_add_del_address_using_prefix(
                sw_if_index=self.pg1.sw_if_index,
                address_with_prefix=address1,
                prefix_group=self.prefix_group)

            self.wait_for_solicit()
            self.send_advertise()
            self.wait_for_request()
            ia_pd_opts = DHCP6OptIAPrefix(prefix='7:8::', plen=56, preflft=4,
                                          validlft=3)
            self.send_reply(iapdopt=ia_pd_opts)

            self.sleep(0.5)

            # check FIB contains no addresses
            fib = self.vapi.ip_route_dump(0, True)
            addresses = set(self.get_interface_addresses(fib, self.pg1))
            new_addresses = addresses.difference(self.initial_addresses)
            self.assertEqual(len(new_addresses), 0)
        finally:
            self.vapi.ip6_add_del_address_using_prefix(
                sw_if_index=self.pg1.sw_if_index,
                address_with_prefix=address1,
                prefix_group=self.prefix_group,
                is_add=0)

    def test_T1_greater_than_T2(self):
        """ T1 is greater than T2 """
        # invalid timers: T1 must not exceed T2, so the lease is discarded
        address1 = '::2:0:0:0:405/60'

        try:
            self.vapi.ip6_add_del_address_using_prefix(
                sw_if_index=self.pg1.sw_if_index,
                address_with_prefix=address1,
                prefix_group=self.prefix_group)

            self.wait_for_solicit()
            self.send_advertise()
            self.wait_for_request()
            ia_pd_opts = DHCP6OptIAPrefix(prefix='7:8::', plen=56, preflft=4,
                                          validlft=8)
            self.send_reply(t1=80, t2=40, iapdopt=ia_pd_opts)

            self.sleep(0.5)

            # check FIB contains no addresses
            fib = self.vapi.ip_route_dump(0, True)
            addresses = set(self.get_interface_addresses(fib, self.pg1))
            new_addresses = addresses.difference(self.initial_addresses)
            self.assertEqual(len(new_addresses), 0)
        finally:
            self.vapi.ip6_add_del_address_using_prefix(
                sw_if_index=self.pg1.sw_if_index,
                prefix_group=self.prefix_group,
                address_with_prefix=address1,
                is_add=False)
| 36.663342
| 79
| 0.609713
| 3,648
| 29,404
| 4.650768
| 0.07045
| 0.025581
| 0.031121
| 0.021219
| 0.869445
| 0.851114
| 0.83243
| 0.796298
| 0.779559
| 0.753861
| 0
| 0.036392
| 0.292579
| 29,404
| 801
| 80
| 36.709114
| 0.779241
| 0.036594
| 0
| 0.751212
| 0
| 0
| 0.014289
| 0
| 0
| 0
| 0.000709
| 0
| 0.11147
| 1
| 0.088853
| false
| 0
| 0.012924
| 0.001616
| 0.114701
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f44f0d37638b83ba66ad99022a08bfbe8ea97063
| 12,274
|
py
|
Python
|
test/k8s_metrics_tests/metric_data/summary.py
|
alexanderldavis/splunk-connect-for-kubernetes
|
01bf0876e5f355b26825821a3b1d1a398cde4fde
|
[
"Apache-2.0"
] | null | null | null |
test/k8s_metrics_tests/metric_data/summary.py
|
alexanderldavis/splunk-connect-for-kubernetes
|
01bf0876e5f355b26825821a3b1d1a398cde4fde
|
[
"Apache-2.0"
] | null | null | null |
test/k8s_metrics_tests/metric_data/summary.py
|
alexanderldavis/splunk-connect-for-kubernetes
|
01bf0876e5f355b26825821a3b1d1a398cde4fde
|
[
"Apache-2.0"
] | 1
|
2022-03-16T09:54:43.000Z
|
2022-03-16T09:54:43.000Z
|
from test.k8s_metrics_tests.metric_data.helper import (
greater,
greater_or_equal,
should_exist,
)
def _checks(code):
    """Expand a shorthand code into a fresh assertion list.

    'pos'    -> metric must exist and be strictly greater than zero
    'nonneg' -> metric must exist and be greater than or equal to zero
    'ge'     -> metric must be greater than or equal to zero
                (existence is not asserted)

    A new list is built on every call so no two entries share mutable state.
    """
    if code == "pos":
        return [[should_exist, None], [greater, 0]]
    if code == "nonneg":
        return [[should_exist, None], [greater_or_equal, 0]]
    return [[greater_or_equal, 0]]


# (metric name, selector, assertion shorthand) triples, expanded below into
# the {"assertions", "name", "selector"} dicts the test harness consumes.
# Entries that appear more than once in the original table (e.g.
# kube.node.uptime, kube.node.memory.rss_bytes) are deliberately kept.
summary_metrics = [
    {"assertions": _checks(code), "name": name, "selector": selector}
    for name, selector, code in [
        # ---- node-level metrics -----------------------------------------
        ("kube.node.uptime", "node", "pos"),
        ("kube.node.cpu.usage_rate", "node", "pos"),
        ("kube.node.cpu.usage", "node", "pos"),
        ("kube.node.memory.available_bytes", "node", "pos"),
        ("kube.node.memory.usage_bytes", "node", "pos"),
        ("kube.node.memory.rss_bytes", "node", "pos"),
        ("kube.node.memory.page_faults", "node", "nonneg"),
        ("kube.node.memory.major_page_faults", "node", "nonneg"),
        ("kube.node.network.rx_bytes", "node", "pos"),
        ("kube.node.network.rx_errors", "node", "ge"),
        ("kube.node.network.tx_bytes", "node", "pos"),
        ("kube.node.network.tx_errors", "node", "ge"),
        ("kube.node.fs.available_bytes", "node", "pos"),
        ("kube.node.fs.capacity_bytes", "node", "pos"),
        ("kube.node.fs.used_bytes", "node", "pos"),
        ("kube.node.fs.inodes_free", "node", "pos"),
        ("kube.node.fs.inodes", "node", "pos"),
        ("kube.node.fs.inodes_used", "node", "pos"),
        ("kube.node.imagefs.available_bytes", "node", "pos"),
        ("kube.node.imagefs.capacity_bytes", "node", "pos"),
        ("kube.node.imagefs.used_bytes", "node", "pos"),
        ("kube.node.imagefs.inodes_free", "node", "pos"),
        ("kube.node.imagefs.inodes", "node", "pos"),
        ("kube.node.imagefs.inodes_used", "node", "pos"),
        ("kube.node.runtime.imagefs.maxpid", "node", "pos"),
        ("kube.node.runtime.imagefs.curproc", "node", "pos"),
        # duplicates retained exactly as in the original table
        ("kube.node.uptime", "node", "pos"),
        ("kube.node.cpu.usage", "node", "pos"),
        ("kube.node.memory.available_bytes", "node", "pos"),
        ("kube.node.memory.usage_bytes", "node", "pos"),
        ("kube.node.memory.rss_bytes", "node", "pos"),
        ("kube.node.memory.rss_bytes", "node", "pos"),
        ("kube.node.memory.page_faults", "node", "nonneg"),
        ("kube.node.memory.major_page_faults", "node", "nonneg"),
        # ---- pod-level metrics ------------------------------------------
        ("kube.pod.uptime", "pod-name", "pos"),
        ("kube.pod.cpu.usage_rate", "pod-name", "pos"),
        ("kube.pod.cpu.usage", "pod-name", "pos"),
        ("kube.pod.memory.available_bytes", "pod-name", "pos"),
        ("kube.pod.memory.usage_bytes", "pod-name", "pos"),
        ("kube.pod.memory.rss_bytes", "pod-name", "pos"),
        ("kube.pod.memory.page_faults", "pod-name", "nonneg"),
        ("kube.pod.memory.major_page_faults", "pod-name", "nonneg"),
        ("kube.pod.network.rx_bytes", "pod-name", "ge"),
        ("kube.pod.network.rx_errors", "pod-name", "ge"),
        ("kube.pod.network.tx_bytes", "pod-name", "ge"),
        ("kube.pod.network.tx_errors", "pod-name", "ge"),
        ("kube.pod.ephemeral-storage.available_bytes", "pod-name", "pos"),
        ("kube.pod.ephemeral-storage.capacity_bytes", "pod-name", "pos"),
        ("kube.pod.ephemeral-storage.used_bytes", "pod-name", "pos"),
        ("kube.pod.ephemeral-storage.inodes_free", "pod-name", "pos"),
        ("kube.pod.ephemeral-storage.inodes", "pod-name", "pos"),
        ("kube.pod.ephemeral-storage.inodes_used", "pod-name", "pos"),
        ("kube.pod.volume.available_bytes", "pod-name", "pos"),
        ("kube.pod.volume.capacity_bytes", "pod-name", "pos"),
        ("kube.pod.volume.used_bytes", "pod-name", "pos"),
        ("kube.pod.volume.inodes_free", "pod-name", "pos"),
        ("kube.pod.volume.inodes", "pod-name", "pos"),
        ("kube.pod.volume.inodes_used", "pod-name", "pos"),
        # ---- container-level metrics ------------------------------------
        ("kube.container.uptime", "container-name", "pos"),
        ("kube.container.cpu.usage_rate", "container-name", "pos"),
        ("kube.container.cpu.usage", "container-name", "pos"),
        ("kube.container.memory.available_bytes", "container-name", "pos"),
        ("kube.container.memory.usage_bytes", "container-name", "pos"),
        ("kube.container.memory.rss_bytes", "container-name", "pos"),
        ("kube.container.memory.page_faults", "container-name", "nonneg"),
        ("kube.container.memory.major_page_faults", "container-name", "nonneg"),
        ("kube.container.memory.working_set_bytes", "container-name", "pos"),
        ("kube.container.rootfs.available_bytes", "container-name", "pos"),
        ("kube.container.rootfs.capacity_bytes", "container-name", "pos"),
        # used_bytes only requires >= 0 (an image layer may report 0 usage)
        ("kube.container.rootfs.used_bytes", "container-name", "nonneg"),
        ("kube.container.rootfs.inodes_free", "container-name", "pos"),
        ("kube.container.rootfs.inodes", "container-name", "pos"),
        ("kube.container.rootfs.inodes_used", "container-name", "pos"),
        ("kube.container.logs.available_bytes", "container-name", "pos"),
        ("kube.container.logs.capacity_bytes", "container-name", "pos"),
        ("kube.container.logs.used_bytes", "container-name", "pos"),
        ("kube.container.logs.inodes_free", "container-name", "pos"),
        ("kube.container.logs.inodes", "container-name", "pos"),
        ("kube.container.logs.inodes_used", "container-name", "pos"),
    ]
]
| 30.381188
| 68
| 0.519716
| 1,173
| 12,274
| 5.286445
| 0.046888
| 0.063699
| 0.114659
| 0.294307
| 0.975165
| 0.965973
| 0.961458
| 0.961458
| 0.961458
| 0.960168
| 0
| 0.009025
| 0.277823
| 12,274
| 403
| 69
| 30.456576
| 0.690546
| 0
| 0
| 0.430348
| 0
| 0
| 0.377383
| 0.175167
| 0
| 0
| 0
| 0
| 0.196517
| 1
| 0
| false
| 0
| 0.002488
| 0
| 0.002488
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f454b1ba6992ac7c2924580cb5f0e66399692ad1
| 111
|
py
|
Python
|
tests/conftest.py
|
hemanthnakkina/layer-etcd
|
ab21eb62984c0f84088f11881e5ad2783388557b
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2016-05-30T02:58:43.000Z
|
2019-03-07T09:55:04.000Z
|
tests/conftest.py
|
hemanthnakkina/layer-etcd
|
ab21eb62984c0f84088f11881e5ad2783388557b
|
[
"ECL-2.0",
"Apache-2.0"
] | 122
|
2016-04-18T15:31:24.000Z
|
2019-01-29T14:00:58.000Z
|
tests/conftest.py
|
hemanthnakkina/layer-etcd
|
ab21eb62984c0f84088f11881e5ad2783388557b
|
[
"ECL-2.0",
"Apache-2.0"
] | 15
|
2016-03-21T10:32:15.000Z
|
2019-01-29T08:03:00.000Z
|
# Test bootstrap for the charm's pytest suite.
# NOTE(review): charms.unit_test presumably installs test doubles for the
# charms.reactive framework — confirm against the charms.unit_test docs.
import charms.unit_test

# Patch the reactive framework before any charm code is imported, so module
# import side effects hit the stubs rather than a live Juju environment.
charms.unit_test.patch_reactive()
# Replace the charms.leadership module with a stub so imports of it succeed.
charms.unit_test.patch_module("charms.leadership")
| 18.5
| 50
| 0.837838
| 16
| 111
| 5.5
| 0.5
| 0.340909
| 0.477273
| 0.431818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 111
| 5
| 51
| 22.2
| 0.838095
| 0
| 0
| 0
| 0
| 0
| 0.153153
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
f48be25a7665c1b91b26d93c0ed4c00c260125e4
| 121,148
|
py
|
Python
|
sdk/python/pulumi_kubernetes/apps/v1/_inputs.py
|
axis-edge/pulumi-kubernetes
|
cb2803c54ec2131c04564f863dd3577284fa1650
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_kubernetes/apps/v1/_inputs.py
|
axis-edge/pulumi-kubernetes
|
cb2803c54ec2131c04564f863dd3577284fa1650
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_kubernetes/apps/v1/_inputs.py
|
axis-edge/pulumi-kubernetes
|
cb2803c54ec2131c04564f863dd3577284fa1650
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by pulumigen. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ... import core as _core
from ... import meta as _meta
# Explicit public API of this generated module: the *Args input classes
# re-exported for star-imports of `_inputs`.
__all__ = [
    'ControllerRevisionArgs',
    'DaemonSetConditionArgs',
    'DaemonSetSpecArgs',
    'DaemonSetStatusArgs',
    'DaemonSetUpdateStrategyArgs',
    'DaemonSetArgs',
    'DeploymentConditionArgs',
    'DeploymentSpecArgs',
    'DeploymentStatusArgs',
    'DeploymentStrategyArgs',
    'DeploymentArgs',
    'ReplicaSetConditionArgs',
    'ReplicaSetSpecArgs',
    'ReplicaSetStatusArgs',
    'ReplicaSetArgs',
    'RollingUpdateDaemonSetArgs',
    'RollingUpdateDeploymentArgs',
    'RollingUpdateStatefulSetStrategyArgs',
    'StatefulSetConditionArgs',
    'StatefulSetPersistentVolumeClaimRetentionPolicyArgs',
    'StatefulSetSpecArgs',
    'StatefulSetStatusArgs',
    'StatefulSetUpdateStrategyArgs',
    'StatefulSetArgs',
]
@pulumi.input_type
class ControllerRevisionArgs:
    def __init__(__self__, *,
                 revision: pulumi.Input[int],
                 api_version: Optional[pulumi.Input[str]] = None,
                 data: Optional[Any] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None):
        """
        ControllerRevision implements an immutable snapshot of state data. Clients are responsible for serializing and deserializing the objects that contain their internal state. Once a ControllerRevision has been successfully created, it can not be updated. The API Server will fail validation of all requests that attempt to mutate the Data field. ControllerRevisions may, however, be deleted. Note that, due to its use by both the DaemonSet and StatefulSet controllers for update and rollback, this object is beta. However, it may be subject to name and representation changes in future releases, and clients should not depend on its stability. It is primarily for internal use by controllers.
        :param pulumi.Input[int] revision: Revision indicates the revision of the state represented by Data.
        :param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        :param Any data: Data is the serialized representation of the state.
        :param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        :param pulumi.Input['_meta.v1.ObjectMetaArgs'] metadata: Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        """
        pulumi.set(__self__, "revision", revision)
        if api_version is not None:
            # NOTE: the generator pins api_version to this resource's
            # group/version; any caller-supplied value is discarded.
            pulumi.set(__self__, "api_version", 'apps/v1')
        if data is not None:
            pulumi.set(__self__, "data", data)
        if kind is not None:
            # NOTE: likewise pinned to the resource kind, not the given value.
            pulumi.set(__self__, "kind", 'ControllerRevision')
        if metadata is not None:
            pulumi.set(__self__, "metadata", metadata)

    # Accessors below mirror the constructor arguments; pulumi.getter(name=...)
    # maps a snake_case attribute onto its camelCase schema name.
    @property
    @pulumi.getter
    def revision(self) -> pulumi.Input[int]:
        """
        Revision indicates the revision of the state represented by Data.
        """
        return pulumi.get(self, "revision")

    @revision.setter
    def revision(self, value: pulumi.Input[int]):
        pulumi.set(self, "revision", value)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> Optional[pulumi.Input[str]]:
        """
        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        """
        return pulumi.get(self, "api_version")

    @api_version.setter
    def api_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_version", value)

    @property
    @pulumi.getter
    def data(self) -> Optional[Any]:
        """
        Data is the serialized representation of the state.
        """
        return pulumi.get(self, "data")

    @data.setter
    def data(self, value: Optional[Any]):
        pulumi.set(self, "data", value)

    @property
    @pulumi.getter
    def kind(self) -> Optional[pulumi.Input[str]]:
        """
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        """
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
        """
        Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        """
        return pulumi.get(self, "metadata")

    @metadata.setter
    def metadata(self, value: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
        pulumi.set(self, "metadata", value)
@pulumi.input_type
class DaemonSetConditionArgs:
    def __init__(__self__, *,
                 status: pulumi.Input[str],
                 type: pulumi.Input[str],
                 last_transition_time: Optional[pulumi.Input[str]] = None,
                 message: Optional[pulumi.Input[str]] = None,
                 reason: Optional[pulumi.Input[str]] = None):
        """
        DaemonSetCondition describes the state of a DaemonSet at a certain point.
        :param pulumi.Input[str] status: Status of the condition, one of True, False, Unknown.
        :param pulumi.Input[str] type: Type of DaemonSet condition.
        :param pulumi.Input[str] last_transition_time: Last time the condition transitioned from one status to another.
        :param pulumi.Input[str] message: A human readable message indicating details about the transition.
        :param pulumi.Input[str] reason: The reason for the condition's last transition.
        """
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "type", type)
        # Optional fields are stored only when explicitly provided, so unset
        # fields remain absent from the input map.
        if last_transition_time is not None:
            pulumi.set(__self__, "last_transition_time", last_transition_time)
        if message is not None:
            pulumi.set(__self__, "message", message)
        if reason is not None:
            pulumi.set(__self__, "reason", reason)

    @property
    @pulumi.getter
    def status(self) -> pulumi.Input[str]:
        """
        Status of the condition, one of True, False, Unknown.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: pulumi.Input[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Type of DaemonSet condition.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="lastTransitionTime")
    def last_transition_time(self) -> Optional[pulumi.Input[str]]:
        """
        Last time the condition transitioned from one status to another.
        """
        return pulumi.get(self, "last_transition_time")

    @last_transition_time.setter
    def last_transition_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_transition_time", value)

    @property
    @pulumi.getter
    def message(self) -> Optional[pulumi.Input[str]]:
        """
        A human readable message indicating details about the transition.
        """
        return pulumi.get(self, "message")

    @message.setter
    def message(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "message", value)

    @property
    @pulumi.getter
    def reason(self) -> Optional[pulumi.Input[str]]:
        """
        The reason for the condition's last transition.
        """
        return pulumi.get(self, "reason")

    @reason.setter
    def reason(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reason", value)
@pulumi.input_type
class DaemonSetSpecArgs:
    def __init__(__self__, *,
                 selector: pulumi.Input['_meta.v1.LabelSelectorArgs'],
                 template: pulumi.Input['_core.v1.PodTemplateSpecArgs'],
                 min_ready_seconds: Optional[pulumi.Input[int]] = None,
                 revision_history_limit: Optional[pulumi.Input[int]] = None,
                 update_strategy: Optional[pulumi.Input['DaemonSetUpdateStrategyArgs']] = None):
        """
        DaemonSetSpec is the specification of a daemon set.
        :param pulumi.Input['_meta.v1.LabelSelectorArgs'] selector: A label query over pods that are managed by the daemon set. Must match in order to be controlled. It must match the pod template's labels. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors
        :param pulumi.Input['_core.v1.PodTemplateSpecArgs'] template: An object that describes the pod that will be created. The DaemonSet will create exactly one copy of this pod on every node that matches the template's node selector (or on every node if no node selector is specified). More info: https://kubernetes.io/docs/concepts/workloads/controllers/replicationcontroller#pod-template
        :param pulumi.Input[int] min_ready_seconds: The minimum number of seconds for which a newly created DaemonSet pod should be ready without any of its container crashing, for it to be considered available. Defaults to 0 (pod will be considered available as soon as it is ready).
        :param pulumi.Input[int] revision_history_limit: The number of old history to retain to allow rollback. This is a pointer to distinguish between explicit zero and not specified. Defaults to 10.
        :param pulumi.Input['DaemonSetUpdateStrategyArgs'] update_strategy: An update strategy to replace existing DaemonSet pods with new pods.
        """
        # selector and template are the only required fields of the spec.
        pulumi.set(__self__, "selector", selector)
        pulumi.set(__self__, "template", template)
        # Optional fields are stored only when explicitly provided.
        if min_ready_seconds is not None:
            pulumi.set(__self__, "min_ready_seconds", min_ready_seconds)
        if revision_history_limit is not None:
            pulumi.set(__self__, "revision_history_limit", revision_history_limit)
        if update_strategy is not None:
            pulumi.set(__self__, "update_strategy", update_strategy)

    @property
    @pulumi.getter
    def selector(self) -> pulumi.Input['_meta.v1.LabelSelectorArgs']:
        """
        A label query over pods that are managed by the daemon set. Must match in order to be controlled. It must match the pod template's labels. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors
        """
        return pulumi.get(self, "selector")

    @selector.setter
    def selector(self, value: pulumi.Input['_meta.v1.LabelSelectorArgs']):
        pulumi.set(self, "selector", value)

    @property
    @pulumi.getter
    def template(self) -> pulumi.Input['_core.v1.PodTemplateSpecArgs']:
        """
        An object that describes the pod that will be created. The DaemonSet will create exactly one copy of this pod on every node that matches the template's node selector (or on every node if no node selector is specified). More info: https://kubernetes.io/docs/concepts/workloads/controllers/replicationcontroller#pod-template
        """
        return pulumi.get(self, "template")

    @template.setter
    def template(self, value: pulumi.Input['_core.v1.PodTemplateSpecArgs']):
        pulumi.set(self, "template", value)

    @property
    @pulumi.getter(name="minReadySeconds")
    def min_ready_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        The minimum number of seconds for which a newly created DaemonSet pod should be ready without any of its container crashing, for it to be considered available. Defaults to 0 (pod will be considered available as soon as it is ready).
        """
        return pulumi.get(self, "min_ready_seconds")

    @min_ready_seconds.setter
    def min_ready_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "min_ready_seconds", value)

    @property
    @pulumi.getter(name="revisionHistoryLimit")
    def revision_history_limit(self) -> Optional[pulumi.Input[int]]:
        """
        The number of old history to retain to allow rollback. This is a pointer to distinguish between explicit zero and not specified. Defaults to 10.
        """
        return pulumi.get(self, "revision_history_limit")

    @revision_history_limit.setter
    def revision_history_limit(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "revision_history_limit", value)

    @property
    @pulumi.getter(name="updateStrategy")
    def update_strategy(self) -> Optional[pulumi.Input['DaemonSetUpdateStrategyArgs']]:
        """
        An update strategy to replace existing DaemonSet pods with new pods.
        """
        return pulumi.get(self, "update_strategy")

    @update_strategy.setter
    def update_strategy(self, value: Optional[pulumi.Input['DaemonSetUpdateStrategyArgs']]):
        pulumi.set(self, "update_strategy", value)
@pulumi.input_type
class DaemonSetStatusArgs:
    def __init__(__self__, *,
                 current_number_scheduled: pulumi.Input[int],
                 desired_number_scheduled: pulumi.Input[int],
                 number_misscheduled: pulumi.Input[int],
                 number_ready: pulumi.Input[int],
                 collision_count: Optional[pulumi.Input[int]] = None,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['DaemonSetConditionArgs']]]] = None,
                 number_available: Optional[pulumi.Input[int]] = None,
                 number_unavailable: Optional[pulumi.Input[int]] = None,
                 observed_generation: Optional[pulumi.Input[int]] = None,
                 updated_number_scheduled: Optional[pulumi.Input[int]] = None):
        """
        DaemonSetStatus represents the current status of a daemon set.
        :param pulumi.Input[int] current_number_scheduled: The number of nodes that are running at least 1 daemon pod and are supposed to run the daemon pod. More info: https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/
        :param pulumi.Input[int] desired_number_scheduled: The total number of nodes that should be running the daemon pod (including nodes correctly running the daemon pod). More info: https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/
        :param pulumi.Input[int] number_misscheduled: The number of nodes that are running the daemon pod, but are not supposed to run the daemon pod. More info: https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/
        :param pulumi.Input[int] number_ready: numberReady is the number of nodes that should be running the daemon pod and have one or more of the daemon pod running with a Ready Condition.
        :param pulumi.Input[int] collision_count: Count of hash collisions for the DaemonSet. The DaemonSet controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ControllerRevision.
        :param pulumi.Input[Sequence[pulumi.Input['DaemonSetConditionArgs']]] conditions: Represents the latest available observations of a DaemonSet's current state.
        :param pulumi.Input[int] number_available: The number of nodes that should be running the daemon pod and have one or more of the daemon pod running and available (ready for at least spec.minReadySeconds)
        :param pulumi.Input[int] number_unavailable: The number of nodes that should be running the daemon pod and have none of the daemon pod running and available (ready for at least spec.minReadySeconds)
        :param pulumi.Input[int] observed_generation: The most recent generation observed by the daemon set controller.
        :param pulumi.Input[int] updated_number_scheduled: The total number of nodes that are running updated daemon pod
        """
        # The four scheduling counters are required; everything else is
        # stored only when explicitly provided.
        pulumi.set(__self__, "current_number_scheduled", current_number_scheduled)
        pulumi.set(__self__, "desired_number_scheduled", desired_number_scheduled)
        pulumi.set(__self__, "number_misscheduled", number_misscheduled)
        pulumi.set(__self__, "number_ready", number_ready)
        if collision_count is not None:
            pulumi.set(__self__, "collision_count", collision_count)
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)
        if number_available is not None:
            pulumi.set(__self__, "number_available", number_available)
        if number_unavailable is not None:
            pulumi.set(__self__, "number_unavailable", number_unavailable)
        if observed_generation is not None:
            pulumi.set(__self__, "observed_generation", observed_generation)
        if updated_number_scheduled is not None:
            pulumi.set(__self__, "updated_number_scheduled", updated_number_scheduled)

    # Accessors mirror the constructor arguments; pulumi.getter(name=...)
    # maps each snake_case attribute onto its camelCase schema name.
    @property
    @pulumi.getter(name="currentNumberScheduled")
    def current_number_scheduled(self) -> pulumi.Input[int]:
        """
        The number of nodes that are running at least 1 daemon pod and are supposed to run the daemon pod. More info: https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/
        """
        return pulumi.get(self, "current_number_scheduled")

    @current_number_scheduled.setter
    def current_number_scheduled(self, value: pulumi.Input[int]):
        pulumi.set(self, "current_number_scheduled", value)

    @property
    @pulumi.getter(name="desiredNumberScheduled")
    def desired_number_scheduled(self) -> pulumi.Input[int]:
        """
        The total number of nodes that should be running the daemon pod (including nodes correctly running the daemon pod). More info: https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/
        """
        return pulumi.get(self, "desired_number_scheduled")

    @desired_number_scheduled.setter
    def desired_number_scheduled(self, value: pulumi.Input[int]):
        pulumi.set(self, "desired_number_scheduled", value)

    @property
    @pulumi.getter(name="numberMisscheduled")
    def number_misscheduled(self) -> pulumi.Input[int]:
        """
        The number of nodes that are running the daemon pod, but are not supposed to run the daemon pod. More info: https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/
        """
        return pulumi.get(self, "number_misscheduled")

    @number_misscheduled.setter
    def number_misscheduled(self, value: pulumi.Input[int]):
        pulumi.set(self, "number_misscheduled", value)

    @property
    @pulumi.getter(name="numberReady")
    def number_ready(self) -> pulumi.Input[int]:
        """
        numberReady is the number of nodes that should be running the daemon pod and have one or more of the daemon pod running with a Ready Condition.
        """
        return pulumi.get(self, "number_ready")

    @number_ready.setter
    def number_ready(self, value: pulumi.Input[int]):
        pulumi.set(self, "number_ready", value)

    @property
    @pulumi.getter(name="collisionCount")
    def collision_count(self) -> Optional[pulumi.Input[int]]:
        """
        Count of hash collisions for the DaemonSet. The DaemonSet controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ControllerRevision.
        """
        return pulumi.get(self, "collision_count")

    @collision_count.setter
    def collision_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "collision_count", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DaemonSetConditionArgs']]]]:
        """
        Represents the latest available observations of a DaemonSet's current state.
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DaemonSetConditionArgs']]]]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter(name="numberAvailable")
    def number_available(self) -> Optional[pulumi.Input[int]]:
        """
        The number of nodes that should be running the daemon pod and have one or more of the daemon pod running and available (ready for at least spec.minReadySeconds)
        """
        return pulumi.get(self, "number_available")

    @number_available.setter
    def number_available(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "number_available", value)

    @property
    @pulumi.getter(name="numberUnavailable")
    def number_unavailable(self) -> Optional[pulumi.Input[int]]:
        """
        The number of nodes that should be running the daemon pod and have none of the daemon pod running and available (ready for at least spec.minReadySeconds)
        """
        return pulumi.get(self, "number_unavailable")

    @number_unavailable.setter
    def number_unavailable(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "number_unavailable", value)

    @property
    @pulumi.getter(name="observedGeneration")
    def observed_generation(self) -> Optional[pulumi.Input[int]]:
        """
        The most recent generation observed by the daemon set controller.
        """
        return pulumi.get(self, "observed_generation")

    @observed_generation.setter
    def observed_generation(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "observed_generation", value)

    @property
    @pulumi.getter(name="updatedNumberScheduled")
    def updated_number_scheduled(self) -> Optional[pulumi.Input[int]]:
        """
        The total number of nodes that are running updated daemon pod
        """
        return pulumi.get(self, "updated_number_scheduled")

    @updated_number_scheduled.setter
    def updated_number_scheduled(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "updated_number_scheduled", value)
@pulumi.input_type
class DaemonSetUpdateStrategyArgs:
    """Controls how updates are rolled out to the pods of a DaemonSet."""

    def __init__(__self__, *,
                 rolling_update: Optional[pulumi.Input['RollingUpdateDaemonSetArgs']] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        DaemonSetUpdateStrategy is a struct used to control the update strategy for a DaemonSet.

        :param pulumi.Input['RollingUpdateDaemonSetArgs'] rolling_update: Rolling update config params. Present only if type = "RollingUpdate".
        :param pulumi.Input[str] type: Type of daemon set update. Can be "RollingUpdate" or "OnDelete". Default is RollingUpdate.
               Possible enum values:
               - `"OnDelete"` Replace the old daemons only when it's killed
               - `"RollingUpdate"` Replace the old daemons by new ones using rolling update i.e replace them on each node one after the other.
        """
        # Persist only fields the caller actually supplied.
        for key, arg in (("rolling_update", rolling_update), ("type", type)):
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="rollingUpdate")
    def rolling_update(self) -> Optional[pulumi.Input['RollingUpdateDaemonSetArgs']]:
        """Rolling update config params. Present only if type = "RollingUpdate"."""
        return pulumi.get(self, "rolling_update")

    @rolling_update.setter
    def rolling_update(self, value: Optional[pulumi.Input['RollingUpdateDaemonSetArgs']]):
        pulumi.set(self, "rolling_update", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of daemon set update. Can be "RollingUpdate" or "OnDelete". Default is RollingUpdate.
        Possible enum values:
        - `"OnDelete"` Replace the old daemons only when it's killed
        - `"RollingUpdate"` Replace the old daemons by new ones using rolling update i.e replace them on each node one after the other.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class DaemonSetArgs:
    """Input arguments for the apps/v1 DaemonSet resource."""

    def __init__(__self__, *,
                 api_version: Optional[pulumi.Input[str]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None,
                 spec: Optional[pulumi.Input['DaemonSetSpecArgs']] = None,
                 status: Optional[pulumi.Input['DaemonSetStatusArgs']] = None):
        """
        DaemonSet represents the configuration of a daemon set.

        :param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        :param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        :param pulumi.Input['_meta.v1.ObjectMetaArgs'] metadata: Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        :param pulumi.Input['DaemonSetSpecArgs'] spec: The desired behavior of this daemon set. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
        :param pulumi.Input['DaemonSetStatusArgs'] status: The current status of this daemon set. This data may be out of date by some window of time. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
        """
        # apiVersion and kind are const in the schema: any caller-supplied value
        # is normalized to the canonical constants for this resource type.
        if api_version is not None:
            pulumi.set(__self__, "api_version", 'apps/v1')
        if kind is not None:
            pulumi.set(__self__, "kind", 'DaemonSet')
        # Remaining fields are stored only when supplied by the caller.
        for key, arg in (("metadata", metadata), ("spec", spec), ("status", status)):
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> Optional[pulumi.Input[str]]:
        """APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources"""
        return pulumi.get(self, "api_version")

    @api_version.setter
    def api_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_version", value)

    @property
    @pulumi.getter
    def kind(self) -> Optional[pulumi.Input[str]]:
        """Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds"""
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
        """Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata"""
        return pulumi.get(self, "metadata")

    @metadata.setter
    def metadata(self, value: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
        pulumi.set(self, "metadata", value)

    @property
    @pulumi.getter
    def spec(self) -> Optional[pulumi.Input['DaemonSetSpecArgs']]:
        """The desired behavior of this daemon set. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status"""
        return pulumi.get(self, "spec")

    @spec.setter
    def spec(self, value: Optional[pulumi.Input['DaemonSetSpecArgs']]):
        pulumi.set(self, "spec", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input['DaemonSetStatusArgs']]:
        """The current status of this daemon set. This data may be out of date by some window of time. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status"""
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input['DaemonSetStatusArgs']]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class DeploymentConditionArgs:
    """Describes the state of a Deployment at a certain point in time."""

    def __init__(__self__, *,
                 status: pulumi.Input[str],
                 type: pulumi.Input[str],
                 last_transition_time: Optional[pulumi.Input[str]] = None,
                 last_update_time: Optional[pulumi.Input[str]] = None,
                 message: Optional[pulumi.Input[str]] = None,
                 reason: Optional[pulumi.Input[str]] = None):
        """
        DeploymentCondition describes the state of a deployment at a certain point.

        :param pulumi.Input[str] status: Status of the condition, one of True, False, Unknown.
        :param pulumi.Input[str] type: Type of deployment condition.
        :param pulumi.Input[str] last_transition_time: Last time the condition transitioned from one status to another.
        :param pulumi.Input[str] last_update_time: The last time this condition was updated.
        :param pulumi.Input[str] message: A human readable message indicating details about the transition.
        :param pulumi.Input[str] reason: The reason for the condition's last transition.
        """
        # Required fields are always stored.
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "type", type)
        # Optional fields are stored only when supplied by the caller.
        optional_fields = (
            ("last_transition_time", last_transition_time),
            ("last_update_time", last_update_time),
            ("message", message),
            ("reason", reason),
        )
        for key, arg in optional_fields:
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def status(self) -> pulumi.Input[str]:
        """Status of the condition, one of True, False, Unknown."""
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: pulumi.Input[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Type of deployment condition."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="lastTransitionTime")
    def last_transition_time(self) -> Optional[pulumi.Input[str]]:
        """Last time the condition transitioned from one status to another."""
        return pulumi.get(self, "last_transition_time")

    @last_transition_time.setter
    def last_transition_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_transition_time", value)

    @property
    @pulumi.getter(name="lastUpdateTime")
    def last_update_time(self) -> Optional[pulumi.Input[str]]:
        """The last time this condition was updated."""
        return pulumi.get(self, "last_update_time")

    @last_update_time.setter
    def last_update_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_update_time", value)

    @property
    @pulumi.getter
    def message(self) -> Optional[pulumi.Input[str]]:
        """A human readable message indicating details about the transition."""
        return pulumi.get(self, "message")

    @message.setter
    def message(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "message", value)

    @property
    @pulumi.getter
    def reason(self) -> Optional[pulumi.Input[str]]:
        """The reason for the condition's last transition."""
        return pulumi.get(self, "reason")

    @reason.setter
    def reason(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reason", value)
@pulumi.input_type
class DeploymentSpecArgs:
    """Specification of the desired behavior of a Deployment."""

    def __init__(__self__, *,
                 selector: pulumi.Input['_meta.v1.LabelSelectorArgs'],
                 template: pulumi.Input['_core.v1.PodTemplateSpecArgs'],
                 min_ready_seconds: Optional[pulumi.Input[int]] = None,
                 paused: Optional[pulumi.Input[bool]] = None,
                 progress_deadline_seconds: Optional[pulumi.Input[int]] = None,
                 replicas: Optional[pulumi.Input[int]] = None,
                 revision_history_limit: Optional[pulumi.Input[int]] = None,
                 strategy: Optional[pulumi.Input['DeploymentStrategyArgs']] = None):
        """
        DeploymentSpec is the specification of the desired behavior of the Deployment.

        :param pulumi.Input['_meta.v1.LabelSelectorArgs'] selector: Label selector for pods. Existing ReplicaSets whose pods are selected by this will be the ones affected by this deployment. It must match the pod template's labels.
        :param pulumi.Input['_core.v1.PodTemplateSpecArgs'] template: Template describes the pods that will be created.
        :param pulumi.Input[int] min_ready_seconds: Minimum number of seconds for which a newly created pod should be ready without any of its container crashing, for it to be considered available. Defaults to 0 (pod will be considered available as soon as it is ready)
        :param pulumi.Input[bool] paused: Indicates that the deployment is paused.
        :param pulumi.Input[int] progress_deadline_seconds: The maximum time in seconds for a deployment to make progress before it is considered to be failed. The deployment controller will continue to process failed deployments and a condition with a ProgressDeadlineExceeded reason will be surfaced in the deployment status. Note that progress will not be estimated during the time a deployment is paused. Defaults to 600s.
        :param pulumi.Input[int] replicas: Number of desired pods. This is a pointer to distinguish between explicit zero and not specified. Defaults to 1.
        :param pulumi.Input[int] revision_history_limit: The number of old ReplicaSets to retain to allow rollback. This is a pointer to distinguish between explicit zero and not specified. Defaults to 10.
        :param pulumi.Input['DeploymentStrategyArgs'] strategy: The deployment strategy to use to replace existing pods with new ones.
        """
        # Required fields are always stored.
        pulumi.set(__self__, "selector", selector)
        pulumi.set(__self__, "template", template)
        # Optional fields are stored only when supplied by the caller.
        optional_fields = (
            ("min_ready_seconds", min_ready_seconds),
            ("paused", paused),
            ("progress_deadline_seconds", progress_deadline_seconds),
            ("replicas", replicas),
            ("revision_history_limit", revision_history_limit),
            ("strategy", strategy),
        )
        for key, arg in optional_fields:
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def selector(self) -> pulumi.Input['_meta.v1.LabelSelectorArgs']:
        """Label selector for pods. Existing ReplicaSets whose pods are selected by this will be the ones affected by this deployment. It must match the pod template's labels."""
        return pulumi.get(self, "selector")

    @selector.setter
    def selector(self, value: pulumi.Input['_meta.v1.LabelSelectorArgs']):
        pulumi.set(self, "selector", value)

    @property
    @pulumi.getter
    def template(self) -> pulumi.Input['_core.v1.PodTemplateSpecArgs']:
        """Template describes the pods that will be created."""
        return pulumi.get(self, "template")

    @template.setter
    def template(self, value: pulumi.Input['_core.v1.PodTemplateSpecArgs']):
        pulumi.set(self, "template", value)

    @property
    @pulumi.getter(name="minReadySeconds")
    def min_ready_seconds(self) -> Optional[pulumi.Input[int]]:
        """Minimum number of seconds for which a newly created pod should be ready without any of its container crashing, for it to be considered available. Defaults to 0 (pod will be considered available as soon as it is ready)."""
        return pulumi.get(self, "min_ready_seconds")

    @min_ready_seconds.setter
    def min_ready_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "min_ready_seconds", value)

    @property
    @pulumi.getter
    def paused(self) -> Optional[pulumi.Input[bool]]:
        """Indicates that the deployment is paused."""
        return pulumi.get(self, "paused")

    @paused.setter
    def paused(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "paused", value)

    @property
    @pulumi.getter(name="progressDeadlineSeconds")
    def progress_deadline_seconds(self) -> Optional[pulumi.Input[int]]:
        """The maximum time in seconds for a deployment to make progress before it is considered to be failed. The deployment controller will continue to process failed deployments and a condition with a ProgressDeadlineExceeded reason will be surfaced in the deployment status. Note that progress will not be estimated during the time a deployment is paused. Defaults to 600s."""
        return pulumi.get(self, "progress_deadline_seconds")

    @progress_deadline_seconds.setter
    def progress_deadline_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "progress_deadline_seconds", value)

    @property
    @pulumi.getter
    def replicas(self) -> Optional[pulumi.Input[int]]:
        """Number of desired pods. This is a pointer to distinguish between explicit zero and not specified. Defaults to 1."""
        return pulumi.get(self, "replicas")

    @replicas.setter
    def replicas(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "replicas", value)

    @property
    @pulumi.getter(name="revisionHistoryLimit")
    def revision_history_limit(self) -> Optional[pulumi.Input[int]]:
        """The number of old ReplicaSets to retain to allow rollback. This is a pointer to distinguish between explicit zero and not specified. Defaults to 10."""
        return pulumi.get(self, "revision_history_limit")

    @revision_history_limit.setter
    def revision_history_limit(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "revision_history_limit", value)

    @property
    @pulumi.getter
    def strategy(self) -> Optional[pulumi.Input['DeploymentStrategyArgs']]:
        """The deployment strategy to use to replace existing pods with new ones."""
        return pulumi.get(self, "strategy")

    @strategy.setter
    def strategy(self, value: Optional[pulumi.Input['DeploymentStrategyArgs']]):
        pulumi.set(self, "strategy", value)
@pulumi.input_type
class DeploymentStatusArgs:
    """Most recently observed status of a Deployment."""

    def __init__(__self__, *,
                 available_replicas: Optional[pulumi.Input[int]] = None,
                 collision_count: Optional[pulumi.Input[int]] = None,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentConditionArgs']]]] = None,
                 observed_generation: Optional[pulumi.Input[int]] = None,
                 ready_replicas: Optional[pulumi.Input[int]] = None,
                 replicas: Optional[pulumi.Input[int]] = None,
                 unavailable_replicas: Optional[pulumi.Input[int]] = None,
                 updated_replicas: Optional[pulumi.Input[int]] = None):
        """
        DeploymentStatus is the most recently observed status of the Deployment.

        :param pulumi.Input[int] available_replicas: Total number of available pods (ready for at least minReadySeconds) targeted by this deployment.
        :param pulumi.Input[int] collision_count: Count of hash collisions for the Deployment. The Deployment controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ReplicaSet.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentConditionArgs']]] conditions: Represents the latest available observations of a deployment's current state.
        :param pulumi.Input[int] observed_generation: The generation observed by the deployment controller.
        :param pulumi.Input[int] ready_replicas: readyReplicas is the number of pods targeted by this Deployment with a Ready Condition.
        :param pulumi.Input[int] replicas: Total number of non-terminated pods targeted by this deployment (their labels match the selector).
        :param pulumi.Input[int] unavailable_replicas: Total number of unavailable pods targeted by this deployment. This is the total number of pods that are still required for the deployment to have 100% available capacity. They may either be pods that are running but not yet available or pods that still have not been created.
        :param pulumi.Input[int] updated_replicas: Total number of non-terminated pods targeted by this deployment that have the desired template spec.
        """
        # All fields are optional; store only those supplied by the caller.
        optional_fields = (
            ("available_replicas", available_replicas),
            ("collision_count", collision_count),
            ("conditions", conditions),
            ("observed_generation", observed_generation),
            ("ready_replicas", ready_replicas),
            ("replicas", replicas),
            ("unavailable_replicas", unavailable_replicas),
            ("updated_replicas", updated_replicas),
        )
        for key, arg in optional_fields:
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="availableReplicas")
    def available_replicas(self) -> Optional[pulumi.Input[int]]:
        """Total number of available pods (ready for at least minReadySeconds) targeted by this deployment."""
        return pulumi.get(self, "available_replicas")

    @available_replicas.setter
    def available_replicas(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "available_replicas", value)

    @property
    @pulumi.getter(name="collisionCount")
    def collision_count(self) -> Optional[pulumi.Input[int]]:
        """Count of hash collisions for the Deployment. The Deployment controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ReplicaSet."""
        return pulumi.get(self, "collision_count")

    @collision_count.setter
    def collision_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "collision_count", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentConditionArgs']]]]:
        """Represents the latest available observations of a deployment's current state."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentConditionArgs']]]]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter(name="observedGeneration")
    def observed_generation(self) -> Optional[pulumi.Input[int]]:
        """The generation observed by the deployment controller."""
        return pulumi.get(self, "observed_generation")

    @observed_generation.setter
    def observed_generation(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "observed_generation", value)

    @property
    @pulumi.getter(name="readyReplicas")
    def ready_replicas(self) -> Optional[pulumi.Input[int]]:
        """readyReplicas is the number of pods targeted by this Deployment with a Ready Condition."""
        return pulumi.get(self, "ready_replicas")

    @ready_replicas.setter
    def ready_replicas(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ready_replicas", value)

    @property
    @pulumi.getter
    def replicas(self) -> Optional[pulumi.Input[int]]:
        """Total number of non-terminated pods targeted by this deployment (their labels match the selector)."""
        return pulumi.get(self, "replicas")

    @replicas.setter
    def replicas(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "replicas", value)

    @property
    @pulumi.getter(name="unavailableReplicas")
    def unavailable_replicas(self) -> Optional[pulumi.Input[int]]:
        """Total number of unavailable pods targeted by this deployment. This is the total number of pods that are still required for the deployment to have 100% available capacity. They may either be pods that are running but not yet available or pods that still have not been created."""
        return pulumi.get(self, "unavailable_replicas")

    @unavailable_replicas.setter
    def unavailable_replicas(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "unavailable_replicas", value)

    @property
    @pulumi.getter(name="updatedReplicas")
    def updated_replicas(self) -> Optional[pulumi.Input[int]]:
        """Total number of non-terminated pods targeted by this deployment that have the desired template spec."""
        return pulumi.get(self, "updated_replicas")

    @updated_replicas.setter
    def updated_replicas(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "updated_replicas", value)
@pulumi.input_type
class DeploymentStrategyArgs:
    """Describes how existing pods are replaced with new ones during a Deployment rollout."""

    def __init__(__self__, *,
                 rolling_update: Optional[pulumi.Input['RollingUpdateDeploymentArgs']] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        DeploymentStrategy describes how to replace existing pods with new ones.

        :param pulumi.Input['RollingUpdateDeploymentArgs'] rolling_update: Rolling update config params. Present only if DeploymentStrategyType = RollingUpdate.
        :param pulumi.Input[str] type: Type of deployment. Can be "Recreate" or "RollingUpdate". Default is RollingUpdate.
               Possible enum values:
               - `"Recreate"` Kill all existing pods before creating new ones.
               - `"RollingUpdate"` Replace the old ReplicaSets by new one using rolling update i.e gradually scale down the old ReplicaSets and scale up the new one.
        """
        # Persist only fields the caller actually supplied.
        for key, arg in (("rolling_update", rolling_update), ("type", type)):
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="rollingUpdate")
    def rolling_update(self) -> Optional[pulumi.Input['RollingUpdateDeploymentArgs']]:
        """Rolling update config params. Present only if DeploymentStrategyType = RollingUpdate."""
        return pulumi.get(self, "rolling_update")

    @rolling_update.setter
    def rolling_update(self, value: Optional[pulumi.Input['RollingUpdateDeploymentArgs']]):
        pulumi.set(self, "rolling_update", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of deployment. Can be "Recreate" or "RollingUpdate". Default is RollingUpdate.
        Possible enum values:
        - `"Recreate"` Kill all existing pods before creating new ones.
        - `"RollingUpdate"` Replace the old ReplicaSets by new one using rolling update i.e gradually scale down the old ReplicaSets and scale up the new one.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class DeploymentArgs:
    """Input arguments for the apps/v1 Deployment resource."""

    def __init__(__self__, *,
                 api_version: Optional[pulumi.Input[str]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None,
                 spec: Optional[pulumi.Input['DeploymentSpecArgs']] = None,
                 status: Optional[pulumi.Input['DeploymentStatusArgs']] = None):
        """
        Deployment enables declarative updates for Pods and ReplicaSets.

        This resource waits until its status is ready before registering success
        for create/update, and populating output properties from the current state of the resource.
        The following conditions are used to determine whether the resource creation has
        succeeded or failed:

        1. The Deployment has begun to be updated by the Deployment controller. If the current
           generation of the Deployment is > 1, then this means that the current generation must
           be different from the generation reported by the last outputs.
        2. There exists a ReplicaSet whose revision is equal to the current revision of the
           Deployment.
        3. The Deployment's '.status.conditions' has a status of type 'Available' whose 'status'
           member is set to 'True'.
        4. If the Deployment has generation > 1, then '.status.conditions' has a status of type
           'Progressing', whose 'status' member is set to 'True', and whose 'reason' is
           'NewReplicaSetAvailable'. For generation <= 1, this status field does not exist,
           because it doesn't do a rollout (i.e., it simply creates the Deployment and
           corresponding ReplicaSet), and therefore there is no rollout to mark as 'Progressing'.

        If the Deployment has not reached a Ready state after 10 minutes, it will
        time out and mark the resource update as Failed. You can override the default timeout value
        by setting the 'customTimeouts' option on the resource.

        :param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        :param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        :param pulumi.Input['_meta.v1.ObjectMetaArgs'] metadata: Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        :param pulumi.Input['DeploymentSpecArgs'] spec: Specification of the desired behavior of the Deployment.
        :param pulumi.Input['DeploymentStatusArgs'] status: Most recently observed status of the Deployment.
        """
        # apiVersion and kind are const in the schema: any caller-supplied value
        # is normalized to the canonical constants for this resource type.
        if api_version is not None:
            pulumi.set(__self__, "api_version", 'apps/v1')
        if kind is not None:
            pulumi.set(__self__, "kind", 'Deployment')
        # Remaining fields are stored only when supplied by the caller.
        for key, arg in (("metadata", metadata), ("spec", spec), ("status", status)):
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> Optional[pulumi.Input[str]]:
        """APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources"""
        return pulumi.get(self, "api_version")

    @api_version.setter
    def api_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_version", value)

    @property
    @pulumi.getter
    def kind(self) -> Optional[pulumi.Input[str]]:
        """Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds"""
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
        """Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata"""
        return pulumi.get(self, "metadata")

    @metadata.setter
    def metadata(self, value: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
        pulumi.set(self, "metadata", value)

    @property
    @pulumi.getter
    def spec(self) -> Optional[pulumi.Input['DeploymentSpecArgs']]:
        """Specification of the desired behavior of the Deployment."""
        return pulumi.get(self, "spec")

    @spec.setter
    def spec(self, value: Optional[pulumi.Input['DeploymentSpecArgs']]):
        pulumi.set(self, "spec", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input['DeploymentStatusArgs']]:
        """Most recently observed status of the Deployment."""
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input['DeploymentStatusArgs']]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class ReplicaSetConditionArgs:
    """Describes the state of a ReplicaSet at a certain point in time."""

    def __init__(__self__, *,
                 status: pulumi.Input[str],
                 type: pulumi.Input[str],
                 last_transition_time: Optional[pulumi.Input[str]] = None,
                 message: Optional[pulumi.Input[str]] = None,
                 reason: Optional[pulumi.Input[str]] = None):
        """
        ReplicaSetCondition describes the state of a replica set at a certain point.

        :param pulumi.Input[str] status: Status of the condition, one of True, False, Unknown.
        :param pulumi.Input[str] type: Type of replica set condition.
        :param pulumi.Input[str] last_transition_time: The last time the condition transitioned from one status to another.
        :param pulumi.Input[str] message: A human readable message indicating details about the transition.
        :param pulumi.Input[str] reason: The reason for the condition's last transition.
        """
        # Required fields are always stored.
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "type", type)
        # Optional fields are stored only when supplied by the caller.
        optional_fields = (
            ("last_transition_time", last_transition_time),
            ("message", message),
            ("reason", reason),
        )
        for key, arg in optional_fields:
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def status(self) -> pulumi.Input[str]:
        """Status of the condition, one of True, False, Unknown."""
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: pulumi.Input[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Type of replica set condition."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="lastTransitionTime")
    def last_transition_time(self) -> Optional[pulumi.Input[str]]:
        """The last time the condition transitioned from one status to another."""
        return pulumi.get(self, "last_transition_time")

    @last_transition_time.setter
    def last_transition_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_transition_time", value)

    @property
    @pulumi.getter
    def message(self) -> Optional[pulumi.Input[str]]:
        """A human readable message indicating details about the transition."""
        return pulumi.get(self, "message")

    @message.setter
    def message(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "message", value)

    @property
    @pulumi.getter
    def reason(self) -> Optional[pulumi.Input[str]]:
        """The reason for the condition's last transition."""
        return pulumi.get(self, "reason")

    @reason.setter
    def reason(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reason", value)
@pulumi.input_type
class ReplicaSetSpecArgs:
    def __init__(__self__, *,
                 selector: pulumi.Input['_meta.v1.LabelSelectorArgs'],
                 min_ready_seconds: Optional[pulumi.Input[int]] = None,
                 replicas: Optional[pulumi.Input[int]] = None,
                 template: Optional[pulumi.Input['_core.v1.PodTemplateSpecArgs']] = None):
        """
        ReplicaSetSpec is the specification of a ReplicaSet.

        :param selector: Label query over pods that should match the replica
            count; must match the pod template's labels. More info:
            https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors
        :param min_ready_seconds: Minimum seconds a newly created pod must be
            ready without any container crashing to count as available.
            Defaults to 0 (available as soon as ready).
        :param replicas: Desired number of replicas; pointer semantics
            distinguish explicit zero from unset. Defaults to 1.
        :param template: Pod template used to create pods when insufficient
            replicas are detected.
        """
        # "selector" is required; optional fields are forwarded only when
        # explicitly supplied so server-side defaults still apply.
        pulumi.set(__self__, "selector", selector)
        for prop, supplied in (("min_ready_seconds", min_ready_seconds),
                               ("replicas", replicas),
                               ("template", template)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter
    def selector(self) -> pulumi.Input['_meta.v1.LabelSelectorArgs']:
        """Label query over pods that should match the replica count."""
        return pulumi.get(self, "selector")

    @selector.setter
    def selector(self, new_value: pulumi.Input['_meta.v1.LabelSelectorArgs']):
        pulumi.set(self, "selector", new_value)

    @property
    @pulumi.getter(name="minReadySeconds")
    def min_ready_seconds(self) -> Optional[pulumi.Input[int]]:
        """Minimum seconds a new pod must stay ready to count as available."""
        return pulumi.get(self, "min_ready_seconds")

    @min_ready_seconds.setter
    def min_ready_seconds(self, new_value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "min_ready_seconds", new_value)

    @property
    @pulumi.getter
    def replicas(self) -> Optional[pulumi.Input[int]]:
        """Desired number of replicas (defaults to 1 when unset)."""
        return pulumi.get(self, "replicas")

    @replicas.setter
    def replicas(self, new_value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "replicas", new_value)

    @property
    @pulumi.getter
    def template(self) -> Optional[pulumi.Input['_core.v1.PodTemplateSpecArgs']]:
        """Pod template for pods created by this ReplicaSet."""
        return pulumi.get(self, "template")

    @template.setter
    def template(self, new_value: Optional[pulumi.Input['_core.v1.PodTemplateSpecArgs']]):
        pulumi.set(self, "template", new_value)
@pulumi.input_type
class ReplicaSetStatusArgs:
    def __init__(__self__, *,
                 replicas: pulumi.Input[int],
                 available_replicas: Optional[pulumi.Input[int]] = None,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['ReplicaSetConditionArgs']]]] = None,
                 fully_labeled_replicas: Optional[pulumi.Input[int]] = None,
                 observed_generation: Optional[pulumi.Input[int]] = None,
                 ready_replicas: Optional[pulumi.Input[int]] = None):
        """
        ReplicaSetStatus represents the current status of a ReplicaSet.

        :param replicas: Most recently observed number of replicas.
        :param available_replicas: Number of available replicas (ready for at
            least minReadySeconds) for this replica set.
        :param conditions: Latest available observations of the replica set's
            current state.
        :param fully_labeled_replicas: Number of pods whose labels match the
            pod template's labels.
        :param observed_generation: Generation of the most recently observed
            ReplicaSet.
        :param ready_replicas: Number of pods targeted by this ReplicaSet
            with a Ready condition.
        """
        # "replicas" is required; the remaining status fields are forwarded
        # only when explicitly supplied.
        pulumi.set(__self__, "replicas", replicas)
        for prop, supplied in (("available_replicas", available_replicas),
                               ("conditions", conditions),
                               ("fully_labeled_replicas", fully_labeled_replicas),
                               ("observed_generation", observed_generation),
                               ("ready_replicas", ready_replicas)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter
    def replicas(self) -> pulumi.Input[int]:
        """Most recently observed number of replicas."""
        return pulumi.get(self, "replicas")

    @replicas.setter
    def replicas(self, new_value: pulumi.Input[int]):
        pulumi.set(self, "replicas", new_value)

    @property
    @pulumi.getter(name="availableReplicas")
    def available_replicas(self) -> Optional[pulumi.Input[int]]:
        """Number of available replicas (ready for at least minReadySeconds)."""
        return pulumi.get(self, "available_replicas")

    @available_replicas.setter
    def available_replicas(self, new_value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "available_replicas", new_value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ReplicaSetConditionArgs']]]]:
        """Latest available observations of the replica set's current state."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input['ReplicaSetConditionArgs']]]]):
        pulumi.set(self, "conditions", new_value)

    @property
    @pulumi.getter(name="fullyLabeledReplicas")
    def fully_labeled_replicas(self) -> Optional[pulumi.Input[int]]:
        """Number of pods whose labels match the pod template's labels."""
        return pulumi.get(self, "fully_labeled_replicas")

    @fully_labeled_replicas.setter
    def fully_labeled_replicas(self, new_value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "fully_labeled_replicas", new_value)

    @property
    @pulumi.getter(name="observedGeneration")
    def observed_generation(self) -> Optional[pulumi.Input[int]]:
        """Generation of the most recently observed ReplicaSet."""
        return pulumi.get(self, "observed_generation")

    @observed_generation.setter
    def observed_generation(self, new_value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "observed_generation", new_value)

    @property
    @pulumi.getter(name="readyReplicas")
    def ready_replicas(self) -> Optional[pulumi.Input[int]]:
        """Number of pods targeted by this ReplicaSet with a Ready condition."""
        return pulumi.get(self, "ready_replicas")

    @ready_replicas.setter
    def ready_replicas(self, new_value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ready_replicas", new_value)
@pulumi.input_type
class ReplicaSetArgs:
    def __init__(__self__, *,
                 api_version: Optional[pulumi.Input[str]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None,
                 spec: Optional[pulumi.Input['ReplicaSetSpecArgs']] = None,
                 status: Optional[pulumi.Input['ReplicaSetStatusArgs']] = None):
        """
        ReplicaSet ensures that a specified number of pod replicas are running
        at any given time.

        :param api_version: APIVersion of this representation of the object.
        :param kind: Kind is the REST resource this object represents.
        :param metadata: Standard object metadata; empty labels default to the
            labels of the managed pods.
        :param spec: Desired behavior of the ReplicaSet.
        :param status: Most recently observed status; read-only, populated by
            the system and possibly stale.
        """
        # NOTE(review): codegen pins apiVersion/kind to the known GVK — a
        # caller-supplied value only triggers the assignment, the constant is
        # what gets stored. Presumed intentional generator behavior.
        if api_version is not None:
            pulumi.set(__self__, "api_version", 'apps/v1')
        if kind is not None:
            pulumi.set(__self__, "kind", 'ReplicaSet')
        for prop, supplied in (("metadata", metadata),
                               ("spec", spec),
                               ("status", status)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> Optional[pulumi.Input[str]]:
        """APIVersion defines the versioned schema of this representation."""
        return pulumi.get(self, "api_version")

    @api_version.setter
    def api_version(self, new_value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_version", new_value)

    @property
    @pulumi.getter
    def kind(self) -> Optional[pulumi.Input[str]]:
        """Kind is the REST resource this object represents (CamelCase)."""
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, new_value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kind", new_value)

    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
        """Standard object metadata; empty labels default to the managed pods' labels."""
        return pulumi.get(self, "metadata")

    @metadata.setter
    def metadata(self, new_value: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
        pulumi.set(self, "metadata", new_value)

    @property
    @pulumi.getter
    def spec(self) -> Optional[pulumi.Input['ReplicaSetSpecArgs']]:
        """Desired behavior of the ReplicaSet."""
        return pulumi.get(self, "spec")

    @spec.setter
    def spec(self, new_value: Optional[pulumi.Input['ReplicaSetSpecArgs']]):
        pulumi.set(self, "spec", new_value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input['ReplicaSetStatusArgs']]:
        """Most recently observed status; read-only and possibly stale."""
        return pulumi.get(self, "status")

    @status.setter
    def status(self, new_value: Optional[pulumi.Input['ReplicaSetStatusArgs']]):
        pulumi.set(self, "status", new_value)
@pulumi.input_type
class RollingUpdateDaemonSetArgs:
    def __init__(__self__, *,
                 max_surge: Optional[pulumi.Input[Union[int, str]]] = None,
                 max_unavailable: Optional[pulumi.Input[Union[int, str]]] = None):
        """
        Spec to control the desired behavior of daemon set rolling update.

        :param max_surge: Maximum number of nodes that may run a surged
            (extra) updated DaemonSet pod during the update — an absolute
            number or a percentage string. Cannot be 0 when max_unavailable
            is 0; default is 0. Beta, gated by DaemonSetUpdateSurge.
        :param max_unavailable: Maximum number (or percentage) of DaemonSet
            pods that may be unavailable during the update. Cannot be 0 when
            max_surge is 0; default is 1.
        """
        # Forward only the values the caller explicitly supplied.
        for prop, supplied in (("max_surge", max_surge),
                               ("max_unavailable", max_unavailable)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter(name="maxSurge")
    def max_surge(self) -> Optional[pulumi.Input[Union[int, str]]]:
        """Maximum surge of updated DaemonSet pods (number or percentage)."""
        return pulumi.get(self, "max_surge")

    @max_surge.setter
    def max_surge(self, new_value: Optional[pulumi.Input[Union[int, str]]]):
        pulumi.set(self, "max_surge", new_value)

    @property
    @pulumi.getter(name="maxUnavailable")
    def max_unavailable(self) -> Optional[pulumi.Input[Union[int, str]]]:
        """Maximum unavailable DaemonSet pods during the update (number or percentage)."""
        return pulumi.get(self, "max_unavailable")

    @max_unavailable.setter
    def max_unavailable(self, new_value: Optional[pulumi.Input[Union[int, str]]]):
        pulumi.set(self, "max_unavailable", new_value)
@pulumi.input_type
class RollingUpdateDeploymentArgs:
    def __init__(__self__, *,
                 max_surge: Optional[pulumi.Input[Union[int, str]]] = None,
                 max_unavailable: Optional[pulumi.Input[Union[int, str]]] = None):
        """
        Spec to control the desired behavior of rolling update.

        :param max_surge: Maximum number of pods schedulable above the
            desired count — an absolute number or a percentage string
            (rounded up). Cannot be 0 when max_unavailable is 0; defaults
            to 25%.
        :param max_unavailable: Maximum number (or percentage, rounded down)
            of pods that may be unavailable during the update. Cannot be 0
            when max_surge is 0; defaults to 25%.
        """
        # Forward only the values the caller explicitly supplied.
        for prop, supplied in (("max_surge", max_surge),
                               ("max_unavailable", max_unavailable)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter(name="maxSurge")
    def max_surge(self) -> Optional[pulumi.Input[Union[int, str]]]:
        """Maximum pods above the desired count during the update (number or percentage)."""
        return pulumi.get(self, "max_surge")

    @max_surge.setter
    def max_surge(self, new_value: Optional[pulumi.Input[Union[int, str]]]):
        pulumi.set(self, "max_surge", new_value)

    @property
    @pulumi.getter(name="maxUnavailable")
    def max_unavailable(self) -> Optional[pulumi.Input[Union[int, str]]]:
        """Maximum unavailable pods during the update (number or percentage)."""
        return pulumi.get(self, "max_unavailable")

    @max_unavailable.setter
    def max_unavailable(self, new_value: Optional[pulumi.Input[Union[int, str]]]):
        pulumi.set(self, "max_unavailable", new_value)
@pulumi.input_type
class RollingUpdateStatefulSetStrategyArgs:
    def __init__(__self__, *,
                 partition: Optional[pulumi.Input[int]] = None):
        """
        RollingUpdateStatefulSetStrategy is used to communicate parameter for
        RollingUpdateStatefulSetStrategyType.

        :param partition: Ordinal at which the StatefulSet should be
            partitioned. Defaults to 0 when unset.
        """
        # Forward only when explicitly supplied so the server default applies.
        if partition is not None:
            pulumi.set(__self__, "partition", partition)

    @property
    @pulumi.getter
    def partition(self) -> Optional[pulumi.Input[int]]:
        """Ordinal at which the StatefulSet is partitioned (default 0)."""
        return pulumi.get(self, "partition")

    @partition.setter
    def partition(self, new_value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "partition", new_value)
@pulumi.input_type
class StatefulSetConditionArgs:
    def __init__(__self__, *,
                 status: pulumi.Input[str],
                 type: pulumi.Input[str],
                 last_transition_time: Optional[pulumi.Input[str]] = None,
                 message: Optional[pulumi.Input[str]] = None,
                 reason: Optional[pulumi.Input[str]] = None):
        """
        StatefulSetCondition describes the state of a statefulset at a
        certain point.

        :param status: Status of the condition, one of True, False, Unknown.
        :param type: Type of statefulset condition.
        :param last_transition_time: Last time the condition transitioned
            from one status to another.
        :param message: Human readable message with details about the
            transition.
        :param reason: The reason for the condition's last transition.
        """
        # Required fields first, then optional fields only when supplied.
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "type", type)
        for prop, supplied in (("last_transition_time", last_transition_time),
                               ("message", message),
                               ("reason", reason)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter
    def status(self) -> pulumi.Input[str]:
        """Status of the condition, one of True, False, Unknown."""
        return pulumi.get(self, "status")

    @status.setter
    def status(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "status", new_value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Type of statefulset condition."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, new_value: pulumi.Input[str]):
        pulumi.set(self, "type", new_value)

    @property
    @pulumi.getter(name="lastTransitionTime")
    def last_transition_time(self) -> Optional[pulumi.Input[str]]:
        """Last time the condition transitioned from one status to another."""
        return pulumi.get(self, "last_transition_time")

    @last_transition_time.setter
    def last_transition_time(self, new_value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_transition_time", new_value)

    @property
    @pulumi.getter
    def message(self) -> Optional[pulumi.Input[str]]:
        """Human readable message with details about the transition."""
        return pulumi.get(self, "message")

    @message.setter
    def message(self, new_value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "message", new_value)

    @property
    @pulumi.getter
    def reason(self) -> Optional[pulumi.Input[str]]:
        """The reason for the condition's last transition."""
        return pulumi.get(self, "reason")

    @reason.setter
    def reason(self, new_value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reason", new_value)
@pulumi.input_type
class StatefulSetPersistentVolumeClaimRetentionPolicyArgs:
    def __init__(__self__, *,
                 when_deleted: Optional[pulumi.Input[str]] = None,
                 when_scaled: Optional[pulumi.Input[str]] = None):
        """
        StatefulSetPersistentVolumeClaimRetentionPolicy describes the policy
        used for PVCs created from the StatefulSet VolumeClaimTemplates.

        :param when_deleted: What happens to PVCs when the StatefulSet is
            deleted: default `Retain` leaves them untouched, `Delete` removes
            them.
        :param when_scaled: What happens to PVCs on scale-down: default
            `Retain` leaves them untouched, `Delete` removes the PVCs of pods
            above the replica count.
        """
        # Forward only the values the caller explicitly supplied.
        for prop, supplied in (("when_deleted", when_deleted),
                               ("when_scaled", when_scaled)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter(name="whenDeleted")
    def when_deleted(self) -> Optional[pulumi.Input[str]]:
        """PVC policy on StatefulSet deletion: `Retain` (default) or `Delete`."""
        return pulumi.get(self, "when_deleted")

    @when_deleted.setter
    def when_deleted(self, new_value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "when_deleted", new_value)

    @property
    @pulumi.getter(name="whenScaled")
    def when_scaled(self) -> Optional[pulumi.Input[str]]:
        """PVC policy on scale-down: `Retain` (default) or `Delete`."""
        return pulumi.get(self, "when_scaled")

    @when_scaled.setter
    def when_scaled(self, new_value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "when_scaled", new_value)
@pulumi.input_type
class StatefulSetSpecArgs:
def __init__(__self__, *,
             selector: pulumi.Input['_meta.v1.LabelSelectorArgs'],
             service_name: pulumi.Input[str],
             template: pulumi.Input['_core.v1.PodTemplateSpecArgs'],
             min_ready_seconds: Optional[pulumi.Input[int]] = None,
             persistent_volume_claim_retention_policy: Optional[pulumi.Input['StatefulSetPersistentVolumeClaimRetentionPolicyArgs']] = None,
             pod_management_policy: Optional[pulumi.Input[str]] = None,
             replicas: Optional[pulumi.Input[int]] = None,
             revision_history_limit: Optional[pulumi.Input[int]] = None,
             update_strategy: Optional[pulumi.Input['StatefulSetUpdateStrategyArgs']] = None,
             volume_claim_templates: Optional[pulumi.Input[Sequence[pulumi.Input['_core.v1.PersistentVolumeClaimArgs']]]] = None):
    """
    A StatefulSetSpec is the specification of a StatefulSet.

    :param selector: Label query over pods; must match the pod template's labels.
    :param service_name: Name of the governing service; must exist before the
        StatefulSet and provides the pods' network identity
        (pod-specific-string.serviceName.default.svc.cluster.local).
    :param template: Pod template each replica is stamped from; replicas share
        the template but keep unique identities.
    :param min_ready_seconds: Seconds a new pod must stay ready to count as
        available (alpha; StatefulSetMinReadySeconds feature gate).
    :param persistent_volume_claim_retention_policy: Lifecycle policy for PVCs
        created from volumeClaimTemplates (alpha; StatefulSetAutoDeletePVC gate).
    :param pod_management_policy: `OrderedReady` (default, strictly ordered) or
        `Parallel` (create/delete pods without waiting).
    :param replicas: Desired replica count; defaults to 1 when unset.
    :param revision_history_limit: Maximum revisions kept in history; defaults
        to 10.
    :param update_strategy: StatefulSetUpdateStrategy applied when the
        template is revised.
    :param volume_claim_templates: Claims pods may reference; each must have a
        matching volumeMount in one container of the template.
    """
    # Required fields first; optional fields are forwarded only when
    # explicitly supplied so server-side defaults still apply.
    pulumi.set(__self__, "selector", selector)
    pulumi.set(__self__, "service_name", service_name)
    pulumi.set(__self__, "template", template)
    optional_props = (
        ("min_ready_seconds", min_ready_seconds),
        ("persistent_volume_claim_retention_policy", persistent_volume_claim_retention_policy),
        ("pod_management_policy", pod_management_policy),
        ("replicas", replicas),
        ("revision_history_limit", revision_history_limit),
        ("update_strategy", update_strategy),
        ("volume_claim_templates", volume_claim_templates),
    )
    for prop, supplied in optional_props:
        if supplied is not None:
            pulumi.set(__self__, prop, supplied)
@property
@pulumi.getter
def selector(self) -> pulumi.Input['_meta.v1.LabelSelectorArgs']:
    """
    selector is a label query over pods that should match the replica count. It must match the pod template's labels. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors
    """
    return pulumi.get(self, "selector")

# Required input: __init__ always stores a value for "selector".
@selector.setter
def selector(self, value: pulumi.Input['_meta.v1.LabelSelectorArgs']):
    pulumi.set(self, "selector", value)
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> pulumi.Input[str]:
    """
    serviceName is the name of the service that governs this StatefulSet. This service must exist before the StatefulSet, and is responsible for the network identity of the set. Pods get DNS/hostnames that follow the pattern: pod-specific-string.serviceName.default.svc.cluster.local where "pod-specific-string" is managed by the StatefulSet controller.
    """
    return pulumi.get(self, "service_name")

# Required input: __init__ always stores a value for "service_name".
@service_name.setter
def service_name(self, value: pulumi.Input[str]):
    pulumi.set(self, "service_name", value)
@property
@pulumi.getter
def template(self) -> pulumi.Input['_core.v1.PodTemplateSpecArgs']:
    """
    template is the object that describes the pod that will be created if insufficient replicas are detected. Each pod stamped out by the StatefulSet will fulfill this Template, but have a unique identity from the rest of the StatefulSet.
    """
    return pulumi.get(self, "template")

# Required input: __init__ always stores a value for "template".
@template.setter
def template(self, value: pulumi.Input['_core.v1.PodTemplateSpecArgs']):
    pulumi.set(self, "template", value)
@property
@pulumi.getter(name="minReadySeconds")
def min_ready_seconds(self) -> Optional[pulumi.Input[int]]:
    """
    Minimum number of seconds for which a newly created pod should be ready without any of its container crashing for it to be considered available. Defaults to 0 (pod will be considered available as soon as it is ready) This is an alpha field and requires enabling StatefulSetMinReadySeconds feature gate.
    """
    return pulumi.get(self, "min_ready_seconds")

# Optional input: only present when supplied to __init__.
@min_ready_seconds.setter
def min_ready_seconds(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "min_ready_seconds", value)
@property
@pulumi.getter(name="persistentVolumeClaimRetentionPolicy")
def persistent_volume_claim_retention_policy(self) -> Optional[pulumi.Input['StatefulSetPersistentVolumeClaimRetentionPolicyArgs']]:
    """
    persistentVolumeClaimRetentionPolicy describes the lifecycle of persistent volume claims created from volumeClaimTemplates. By default, all persistent volume claims are created as needed and retained until manually deleted. This policy allows the lifecycle to be altered, for example by deleting persistent volume claims when their stateful set is deleted, or when their pod is scaled down. This requires the StatefulSetAutoDeletePVC feature gate to be enabled, which is alpha. +optional
    """
    return pulumi.get(self, "persistent_volume_claim_retention_policy")

# Optional input: only present when supplied to __init__.
@persistent_volume_claim_retention_policy.setter
def persistent_volume_claim_retention_policy(self, value: Optional[pulumi.Input['StatefulSetPersistentVolumeClaimRetentionPolicyArgs']]):
    pulumi.set(self, "persistent_volume_claim_retention_policy", value)
@property
@pulumi.getter(name="podManagementPolicy")
def pod_management_policy(self) -> Optional[pulumi.Input[str]]:
    """
    podManagementPolicy controls how pods are created during initial scale up, when replacing pods on nodes, or when scaling down. The default policy is `OrderedReady`, where pods are created in increasing order (pod-0, then pod-1, etc) and the controller will wait until each pod is ready before continuing. When scaling down, the pods are removed in the opposite order. The alternative policy is `Parallel` which will create pods in parallel to match the desired scale without waiting, and on scale down will delete all pods at once.
    Possible enum values:
     - `"OrderedReady"` will create pods in strictly increasing order on scale up and strictly decreasing order on scale down, progressing only when the previous pod is ready or terminated. At most one pod will be changed at any time.
     - `"Parallel"` will create and delete pods as soon as the stateful set replica count is changed, and will not wait for pods to be ready or complete termination.
    """
    return pulumi.get(self, "pod_management_policy")

# Optional input: only present when supplied to __init__.
@pod_management_policy.setter
def pod_management_policy(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "pod_management_policy", value)
@property
@pulumi.getter
def replicas(self) -> Optional[pulumi.Input[int]]:
    """
    replicas is the desired number of replicas of the given Template. These are replicas in the sense that they are instantiations of the same Template, but individual replicas also have a consistent identity. If unspecified, defaults to 1.
    """
    return pulumi.get(self, "replicas")

# Optional input: only present when supplied to __init__.
@replicas.setter
def replicas(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "replicas", value)
@property
@pulumi.getter(name="revisionHistoryLimit")
def revision_history_limit(self) -> Optional[pulumi.Input[int]]:
    """
    revisionHistoryLimit is the maximum number of revisions that will be maintained in the StatefulSet's revision history. The revision history consists of all revisions not represented by a currently applied StatefulSetSpec version. The default value is 10.
    """
    return pulumi.get(self, "revision_history_limit")

# Optional input: only present when supplied to __init__.
@revision_history_limit.setter
def revision_history_limit(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "revision_history_limit", value)
@property
@pulumi.getter(name="updateStrategy")
def update_strategy(self) -> Optional[pulumi.Input['StatefulSetUpdateStrategyArgs']]:
    """
    updateStrategy indicates the StatefulSetUpdateStrategy that will be employed to update Pods in the StatefulSet when a revision is made to Template.
    """
    return pulumi.get(self, "update_strategy")

# Optional input: only present when supplied to __init__.
@update_strategy.setter
def update_strategy(self, value: Optional[pulumi.Input['StatefulSetUpdateStrategyArgs']]):
    pulumi.set(self, "update_strategy", value)
@property
@pulumi.getter(name="volumeClaimTemplates")
def volume_claim_templates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['_core.v1.PersistentVolumeClaimArgs']]]]:
    """
    volumeClaimTemplates is a list of claims that pods are allowed to reference. The StatefulSet controller is responsible for mapping network identities to claims in a way that maintains the identity of a pod. Every claim in this list must have at least one matching (by name) volumeMount in one container in the template. A claim in this list takes precedence over any volumes in the template, with the same name.
    """
    return pulumi.get(self, "volume_claim_templates")

# Optional input: only present when supplied to __init__.
@volume_claim_templates.setter
def volume_claim_templates(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['_core.v1.PersistentVolumeClaimArgs']]]]):
    pulumi.set(self, "volume_claim_templates", value)
@pulumi.input_type
class StatefulSetStatusArgs:
    def __init__(__self__, *,
                 available_replicas: pulumi.Input[int],
                 replicas: pulumi.Input[int],
                 collision_count: Optional[pulumi.Input[int]] = None,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['StatefulSetConditionArgs']]]] = None,
                 current_replicas: Optional[pulumi.Input[int]] = None,
                 current_revision: Optional[pulumi.Input[str]] = None,
                 observed_generation: Optional[pulumi.Input[int]] = None,
                 ready_replicas: Optional[pulumi.Input[int]] = None,
                 update_revision: Optional[pulumi.Input[str]] = None,
                 updated_replicas: Optional[pulumi.Input[int]] = None):
        """
        StatefulSetStatus represents the current state of a StatefulSet.

        :param pulumi.Input[int] available_replicas: Total number of available pods (ready for at least minReadySeconds) targeted by this statefulset. This is a beta field and enabled/disabled by StatefulSetMinReadySeconds feature gate.
        :param pulumi.Input[int] replicas: replicas is the number of Pods created by the StatefulSet controller.
        :param pulumi.Input[int] collision_count: collisionCount is the count of hash collisions for the StatefulSet. The StatefulSet controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ControllerRevision.
        :param pulumi.Input[Sequence[pulumi.Input['StatefulSetConditionArgs']]] conditions: Represents the latest available observations of a statefulset's current state.
        :param pulumi.Input[int] current_replicas: currentReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by currentRevision.
        :param pulumi.Input[str] current_revision: currentRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [0,currentReplicas).
        :param pulumi.Input[int] observed_generation: observedGeneration is the most recent generation observed for this StatefulSet. It corresponds to the StatefulSet's generation, which is updated on mutation by the API Server.
        :param pulumi.Input[int] ready_replicas: readyReplicas is the number of pods created for this StatefulSet with a Ready Condition.
        :param pulumi.Input[str] update_revision: updateRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [replicas-updatedReplicas,replicas)
        :param pulumi.Input[int] updated_replicas: updatedReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by updateRevision.
        """
        # Required fields are always stored.
        pulumi.set(__self__, "available_replicas", available_replicas)
        pulumi.set(__self__, "replicas", replicas)
        # Optional fields are stored only when a value was supplied,
        # preserving the original declaration order of the set() calls.
        for _field, _value in (
                ("collision_count", collision_count),
                ("conditions", conditions),
                ("current_replicas", current_replicas),
                ("current_revision", current_revision),
                ("observed_generation", observed_generation),
                ("ready_replicas", ready_replicas),
                ("update_revision", update_revision),
                ("updated_replicas", updated_replicas)):
            if _value is not None:
                pulumi.set(__self__, _field, _value)

    @property
    @pulumi.getter(name="availableReplicas")
    def available_replicas(self) -> pulumi.Input[int]:
        """
        Total number of available pods (ready for at least minReadySeconds) targeted by this statefulset. This is a beta field and enabled/disabled by StatefulSetMinReadySeconds feature gate.
        """
        return pulumi.get(self, "available_replicas")

    @available_replicas.setter
    def available_replicas(self, value: pulumi.Input[int]):
        pulumi.set(self, "available_replicas", value)

    @property
    @pulumi.getter
    def replicas(self) -> pulumi.Input[int]:
        """
        replicas is the number of Pods created by the StatefulSet controller.
        """
        return pulumi.get(self, "replicas")

    @replicas.setter
    def replicas(self, value: pulumi.Input[int]):
        pulumi.set(self, "replicas", value)

    @property
    @pulumi.getter(name="collisionCount")
    def collision_count(self) -> Optional[pulumi.Input[int]]:
        """
        collisionCount is the count of hash collisions for the StatefulSet. The StatefulSet controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ControllerRevision.
        """
        return pulumi.get(self, "collision_count")

    @collision_count.setter
    def collision_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "collision_count", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['StatefulSetConditionArgs']]]]:
        """
        Represents the latest available observations of a statefulset's current state.
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['StatefulSetConditionArgs']]]]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter(name="currentReplicas")
    def current_replicas(self) -> Optional[pulumi.Input[int]]:
        """
        currentReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by currentRevision.
        """
        return pulumi.get(self, "current_replicas")

    @current_replicas.setter
    def current_replicas(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "current_replicas", value)

    @property
    @pulumi.getter(name="currentRevision")
    def current_revision(self) -> Optional[pulumi.Input[str]]:
        """
        currentRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [0,currentReplicas).
        """
        return pulumi.get(self, "current_revision")

    @current_revision.setter
    def current_revision(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "current_revision", value)

    @property
    @pulumi.getter(name="observedGeneration")
    def observed_generation(self) -> Optional[pulumi.Input[int]]:
        """
        observedGeneration is the most recent generation observed for this StatefulSet. It corresponds to the StatefulSet's generation, which is updated on mutation by the API Server.
        """
        return pulumi.get(self, "observed_generation")

    @observed_generation.setter
    def observed_generation(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "observed_generation", value)

    @property
    @pulumi.getter(name="readyReplicas")
    def ready_replicas(self) -> Optional[pulumi.Input[int]]:
        """
        readyReplicas is the number of pods created for this StatefulSet with a Ready Condition.
        """
        return pulumi.get(self, "ready_replicas")

    @ready_replicas.setter
    def ready_replicas(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ready_replicas", value)

    @property
    @pulumi.getter(name="updateRevision")
    def update_revision(self) -> Optional[pulumi.Input[str]]:
        """
        updateRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [replicas-updatedReplicas,replicas)
        """
        return pulumi.get(self, "update_revision")

    @update_revision.setter
    def update_revision(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "update_revision", value)

    @property
    @pulumi.getter(name="updatedReplicas")
    def updated_replicas(self) -> Optional[pulumi.Input[int]]:
        """
        updatedReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by updateRevision.
        """
        return pulumi.get(self, "updated_replicas")

    @updated_replicas.setter
    def updated_replicas(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "updated_replicas", value)
@pulumi.input_type
class StatefulSetUpdateStrategyArgs:
    def __init__(__self__, *,
                 rolling_update: Optional[pulumi.Input['RollingUpdateStatefulSetStrategyArgs']] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        StatefulSetUpdateStrategy indicates the strategy that the StatefulSet controller will use to perform updates. It includes any additional parameters necessary to perform the update for the indicated strategy.

        :param pulumi.Input['RollingUpdateStatefulSetStrategyArgs'] rolling_update: RollingUpdate is used to communicate parameters when Type is RollingUpdateStatefulSetStrategyType.
        :param pulumi.Input[str] type: Type indicates the type of the StatefulSetUpdateStrategy. Default is RollingUpdate. Possible enum values: `"OnDelete"` (legacy behavior: Pods are recreated from the StatefulSetSpec only when manually deleted) and `"RollingUpdate"` (updates are applied to all Pods respecting the StatefulSet ordering constraints).
        """
        # Both fields are optional; store only those actually supplied.
        for _field, _value in (("rolling_update", rolling_update), ("type", type)):
            if _value is not None:
                pulumi.set(__self__, _field, _value)

    @property
    @pulumi.getter(name="rollingUpdate")
    def rolling_update(self) -> Optional[pulumi.Input['RollingUpdateStatefulSetStrategyArgs']]:
        """
        RollingUpdate is used to communicate parameters when Type is RollingUpdateStatefulSetStrategyType.
        """
        return pulumi.get(self, "rolling_update")

    @rolling_update.setter
    def rolling_update(self, value: Optional[pulumi.Input['RollingUpdateStatefulSetStrategyArgs']]):
        pulumi.set(self, "rolling_update", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        Type indicates the type of the StatefulSetUpdateStrategy. Default is RollingUpdate.
        Possible enum values:
         - `"OnDelete"` triggers the legacy behavior. Version tracking and ordered rolling restarts are disabled. Pods are recreated from the StatefulSetSpec when they are manually deleted.
         - `"RollingUpdate"` indicates that update will be applied to all Pods in the StatefulSet with respect to the StatefulSet ordering constraints. When a scale operation is performed with this strategy, new Pods will be created from the specification version indicated by the StatefulSet's updateRevision.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class StatefulSetArgs:
    def __init__(__self__, *,
                 api_version: Optional[pulumi.Input[str]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None,
                 spec: Optional[pulumi.Input['StatefulSetSpecArgs']] = None,
                 status: Optional[pulumi.Input['StatefulSetStatusArgs']] = None):
        """
        StatefulSet represents a set of pods with consistent identities. Identities are defined as:
         - Network: A single stable DNS and hostname.
         - Storage: As many VolumeClaims as requested.
        The StatefulSet guarantees that a given network identity will always map to the same storage identity.

        This resource waits until its status is ready before registering success
        for create/update, and populating output properties from the current state of the resource.
        The following conditions are used to determine whether the resource creation has
        succeeded or failed:
        1. The value of 'spec.replicas' matches '.status.replicas', '.status.currentReplicas',
           and '.status.readyReplicas'.
        2. The value of '.status.updateRevision' matches '.status.currentRevision'.
        If the StatefulSet has not reached a Ready state after 10 minutes, it will
        time out and mark the resource update as Failed. You can override the default timeout value
        by setting the 'customTimeouts' option on the resource.

        :param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        :param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        :param pulumi.Input['_meta.v1.ObjectMetaArgs'] metadata: Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        :param pulumi.Input['StatefulSetSpecArgs'] spec: Spec defines the desired identities of pods in this set.
        :param pulumi.Input['StatefulSetStatusArgs'] status: Status is the current status of Pods in this StatefulSet. This data may be out of date by some window of time.
        """
        # NOTE: when apiVersion/kind are supplied, the generated code pins them
        # to their canonical literals rather than storing the caller's value.
        if api_version is not None:
            pulumi.set(__self__, "api_version", 'apps/v1')
        if kind is not None:
            pulumi.set(__self__, "kind", 'StatefulSet')
        # Remaining optional fields are stored as given.
        for _field, _value in (("metadata", metadata), ("spec", spec), ("status", status)):
            if _value is not None:
                pulumi.set(__self__, _field, _value)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> Optional[pulumi.Input[str]]:
        """
        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        """
        return pulumi.get(self, "api_version")

    @api_version.setter
    def api_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_version", value)

    @property
    @pulumi.getter
    def kind(self) -> Optional[pulumi.Input[str]]:
        """
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        """
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
        """
        Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        """
        return pulumi.get(self, "metadata")

    @metadata.setter
    def metadata(self, value: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
        pulumi.set(self, "metadata", value)

    @property
    @pulumi.getter
    def spec(self) -> Optional[pulumi.Input['StatefulSetSpecArgs']]:
        """
        Spec defines the desired identities of pods in this set.
        """
        return pulumi.get(self, "spec")

    @spec.setter
    def spec(self, value: Optional[pulumi.Input['StatefulSetSpecArgs']]):
        pulumi.set(self, "spec", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input['StatefulSetStatusArgs']]:
        """
        Status is the current status of Pods in this StatefulSet. This data may be out of date by some window of time.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input['StatefulSetStatusArgs']]):
        pulumi.set(self, "status", value)
| 55.117379
| 1,342
| 0.700177
| 15,224
| 121,148
| 5.468931
| 0.045914
| 0.068701
| 0.065038
| 0.027384
| 0.905764
| 0.874008
| 0.856916
| 0.824728
| 0.811228
| 0.796563
| 0
| 0.00283
| 0.209554
| 121,148
| 2,197
| 1,343
| 55.142467
| 0.866616
| 0.464869
| 0
| 0.737705
| 1
| 0
| 0.138115
| 0.056098
| 0
| 0
| 0
| 0
| 0
| 1
| 0.206089
| false
| 0
| 0.005464
| 0
| 0.323966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
be341b599ae85c5170fb18fb593e8b3272ecfe2d
| 10,135
|
py
|
Python
|
Scripts/customgendersettings/settings/setting_utils.py
|
ColonolNutty/CustomGenderSettings
|
aa8d3e485499e92ca3687744014932c036451525
|
[
"CC-BY-4.0"
] | 6
|
2020-02-08T08:11:52.000Z
|
2021-08-03T03:38:07.000Z
|
Scripts/customgendersettings/settings/setting_utils.py
|
ColonolNutty/CustomGenderSettings
|
aa8d3e485499e92ca3687744014932c036451525
|
[
"CC-BY-4.0"
] | null | null | null |
Scripts/customgendersettings/settings/setting_utils.py
|
ColonolNutty/CustomGenderSettings
|
aa8d3e485499e92ca3687744014932c036451525
|
[
"CC-BY-4.0"
] | null | null | null |
"""
Custom Gender Settings is licensed under the Creative Commons Attribution 4.0 International public license (CC BY 4.0).
https://creativecommons.org/licenses/by/4.0/
https://creativecommons.org/licenses/by/4.0/legalcode
Copyright (c) COLONOLNUTTY
"""
from typing import Any
from customgendersettings.persistence.cgs_data_manager_utils import CGSDataManagerUtils
from customgendersettings.settings.settings import CGSGlobalSetting
from sims.sim_info import SimInfo
from sims4communitylib.utils.sims.common_species_utils import CommonSpeciesUtils
class CGSSettingUtils:
    """ Setting Utilities used by the CGS mod. """

    def __init__(self) -> None:
        self._data_manager = CGSDataManagerUtils()
        self.all_male_options = CGSSettingUtils.AllMaleOptions(self)
        self.all_female_options = CGSSettingUtils.AllFemaleOptions(self)

    def is_enabled_for_interactions(self, sim_info: SimInfo) -> bool:
        """ Determine if a Sim is enabled for Custom Gender Setting interactions. """
        return CommonSpeciesUtils.is_human(sim_info) or CommonSpeciesUtils.is_animal(sim_info)

    def force_all_sims_to_male(self) -> bool:
        """ Determine if all Sims should be forced to Male. """
        return self._is_forced_on(CGSGlobalSetting.ALL_SIMS_FORCE_AS_MALE)

    def force_all_sims_to_female(self) -> bool:
        """ Determine if all Sims should be forced to Female. """
        # Fixed docstring (previously said "Male", copy-pasted from the method
        # above): a False value of ALL_SIMS_FORCE_AS_MALE means "force Female".
        return self._is_forced_off(CGSGlobalSetting.ALL_SIMS_FORCE_AS_MALE)

    class AllMaleOptions:
        """ All Male Options. """

        def __init__(self, setting_utils: 'CGSSettingUtils'):
            self._setting_utils = setting_utils

        def force_breasts_on(self) -> bool:
            """ Determine if all Male Sims should have breasts. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_MALE_SIMS_BREASTS)

        def force_breasts_off(self) -> bool:
            """ Determine if all Male Sims should have no breasts. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_MALE_SIMS_BREASTS)

        def use_toilet_standing(self) -> bool:
            """ Determine if all Male Sims should use the toilet standing. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_MALE_SIMS_USE_TOILET_STANDING)

        def dont_use_toilet_standing(self) -> bool:
            """ Determine if all Male Sims should not use the toilet standing. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_MALE_SIMS_USE_TOILET_STANDING)

        def use_toilet_sitting(self) -> bool:
            """ Determine if all Male Sims should use the toilet sitting. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_MALE_SIMS_USE_TOILET_SITTING)

        def dont_use_toilet_sitting(self) -> bool:
            """ Determine if all Male Sims should not use the toilet sitting. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_MALE_SIMS_USE_TOILET_SITTING)

        def prefer_menswear(self) -> bool:
            """ Determine if all Male Sims should prefer menswear. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_MALE_SIMS_PREFER_MENSWEAR)

        def prefer_womenswear(self) -> bool:
            """ Determine if all Male Sims should prefer womenswear. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_MALE_SIMS_PREFER_MENSWEAR)

        def force_masculine_body_frame(self) -> bool:
            """ Determine if all Male Sims should use a masculine body frame. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_MALE_SIMS_HAVE_MASCULINE_FRAME)

        def force_feminine_body_frame(self) -> bool:
            """ Determine if all Male Sims should use a feminine body frame. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_MALE_SIMS_HAVE_MASCULINE_FRAME)

        def can_impregnate(self) -> bool:
            """ Determine if all Male Sims should be able to impregnate. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_MALE_SIMS_CAN_IMPREGNATE)

        def cannot_impregnate(self) -> bool:
            """ Determine if all Male Sims should not be able to impregnate. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_MALE_SIMS_CAN_IMPREGNATE)

        def can_be_impregnated(self) -> bool:
            """ Determine if all Male Sims should be able to be impregnated. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_MALE_SIMS_CAN_BE_IMPREGNATED)

        def cannot_be_impregnated(self) -> bool:
            """ Determine if all Male Sims should not be able to be impregnated. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_MALE_SIMS_CAN_BE_IMPREGNATED)

        def can_reproduce(self) -> bool:
            """ Determine if all Male Sims should be able to reproduce. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_MALE_SIMS_CAN_REPRODUCE)

        def cannot_reproduce(self) -> bool:
            """ Determine if all Male Sims should not be able to reproduce. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_MALE_SIMS_CAN_REPRODUCE)

        def should_regenerate_outfits(self) -> bool:
            """ Determine if all Male Sims should have their outfits regenerated. """
            # NOTE(review): this checks _is_forced_off (value is False) rather than
            # _is_forced_on — presumably intentional (the setting is named
            # REGENERATE_CLOTHING_ON_GENDER_OPTIONS_CHANGED), but worth confirming.
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_MALE_SIMS_REGENERATE_CLOTHING_ON_GENDER_OPTIONS_CHANGED)

    class AllFemaleOptions:
        """ All Female Options. """

        def __init__(self, setting_utils: 'CGSSettingUtils'):
            self._setting_utils = setting_utils

        def force_breasts_on(self) -> bool:
            """ Determine if all Female Sims should have breasts. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_FEMALE_SIMS_BREASTS)

        def force_breasts_off(self) -> bool:
            """ Determine if all Female Sims should have no breasts. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_FEMALE_SIMS_BREASTS)

        def use_toilet_standing(self) -> bool:
            """ Determine if all Female Sims should use the toilet standing. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_FEMALE_SIMS_USE_TOILET_STANDING)

        def dont_use_toilet_standing(self) -> bool:
            """ Determine if all Female Sims should not use the toilet standing. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_FEMALE_SIMS_USE_TOILET_STANDING)

        def use_toilet_sitting(self) -> bool:
            """ Determine if all Female Sims should use the toilet sitting. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_FEMALE_SIMS_USE_TOILET_SITTING)

        def dont_use_toilet_sitting(self) -> bool:
            """ Determine if all Female Sims should not use the toilet sitting. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_FEMALE_SIMS_USE_TOILET_SITTING)

        def prefer_menswear(self) -> bool:
            """ Determine if all Female Sims should prefer menswear. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_FEMALE_SIMS_PREFER_MENSWEAR)

        def prefer_womenswear(self) -> bool:
            """ Determine if all Female Sims should prefer womenswear. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_FEMALE_SIMS_PREFER_MENSWEAR)

        def force_masculine_body_frame(self) -> bool:
            """ Determine if all Female Sims should use a masculine body frame. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_FEMALE_SIMS_HAVE_MASCULINE_FRAME)

        def force_feminine_body_frame(self) -> bool:
            """ Determine if all Female Sims should use a feminine body frame. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_FEMALE_SIMS_HAVE_MASCULINE_FRAME)

        def can_impregnate(self) -> bool:
            """ Determine if all Female Sims should be able to impregnate. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_FEMALE_SIMS_CAN_IMPREGNATE)

        def cannot_impregnate(self) -> bool:
            """ Determine if all Female Sims should not be able to impregnate. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_FEMALE_SIMS_CAN_IMPREGNATE)

        def can_be_impregnated(self) -> bool:
            """ Determine if all Female Sims should be able to be impregnated. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_FEMALE_SIMS_CAN_BE_IMPREGNATED)

        def cannot_be_impregnated(self) -> bool:
            """ Determine if all Female Sims should not be able to be impregnated. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_FEMALE_SIMS_CAN_BE_IMPREGNATED)

        def can_reproduce(self) -> bool:
            """ Determine if all Female Sims should be able to reproduce. """
            return self._setting_utils._is_forced_on(CGSGlobalSetting.ALL_FEMALE_SIMS_CAN_REPRODUCE)

        def cannot_reproduce(self) -> bool:
            """ Determine if all Female Sims should not be able to reproduce. """
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_FEMALE_SIMS_CAN_REPRODUCE)

        def should_regenerate_outfits(self) -> bool:
            """ Determine if all Female Sims should have their outfits regenerated. """
            # NOTE(review): see AllMaleOptions.should_regenerate_outfits — same
            # _is_forced_off check; confirm the inverted sense is intended.
            return self._setting_utils._is_forced_off(CGSGlobalSetting.ALL_FEMALE_SIMS_REGENERATE_CLOTHING_ON_GENDER_OPTIONS_CHANGED)

    # --- Internal helpers -------------------------------------------------
    # Settings are tri-state: True ("forced on"), False ("forced off"), or
    # None ("disabled"/unset). Identity checks keep truthy non-bool values
    # from being mistaken for an explicit True/False.

    def _is_forced_on(self, key: str) -> bool:
        # Only an explicit stored True counts as "forced on".
        return self._get_value(key) is True

    def _is_forced_off(self, key: str) -> bool:
        # Only an explicit stored False counts as "forced off".
        return self._get_value(key) is False

    def _is_disabled(self, key: str) -> bool:
        # Unset/None means the setting is disabled.
        return self._get_value(key) is None

    def _get_value(self, key: str) -> Any:
        # Look the key up in the global mod settings data store.
        return self._data_manager.get_global_mod_settings_data_store().get_value_by_key(key)
| 53.062827
| 133
| 0.711988
| 1,295
| 10,135
| 5.203089
| 0.090347
| 0.059365
| 0.090234
| 0.101514
| 0.854556
| 0.848768
| 0.838676
| 0.826803
| 0.826803
| 0.819383
| 0
| 0.001126
| 0.211643
| 10,135
| 190
| 134
| 53.342105
| 0.842178
| 0.251702
| 0
| 0.387755
| 0
| 0
| 0.004089
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.44898
| false
| 0
| 0.05102
| 0.040816
| 0.94898
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
be8d76ecf5cf76f60ea579563feaa7cc9e7da4ce
| 15,916
|
py
|
Python
|
radar_exp/inhouse_radar.py
|
MAPS-Lab/mmdetection3d
|
773c2f29495863c73c72c187c08aae8ac23a848e
|
[
"Apache-2.0"
] | null | null | null |
radar_exp/inhouse_radar.py
|
MAPS-Lab/mmdetection3d
|
773c2f29495863c73c72c187c08aae8ac23a848e
|
[
"Apache-2.0"
] | null | null | null |
radar_exp/inhouse_radar.py
|
MAPS-Lab/mmdetection3d
|
773c2f29495863c73c72c187c08aae8ac23a848e
|
[
"Apache-2.0"
] | null | null | null |
# Voxel resolution (x, y, z) in meters; z spans the whole height range -> pillars.
voxel_size = [0.16, 0.16, 4]
# PointPillars-style VoxelNet detector (mmdetection3d dict-config, auto-dumped).
model = dict(
    type='VoxelNet',
    voxel_layer=dict(
        max_num_points=32,  # max points kept per voxel
        point_cloud_range=[0, -39.68, -2, 69.12, 39.68, 2],
        voxel_size=[0.16, 0.16, 4],
        max_voxels=(16000, 40000)),  # (train, test) voxel caps
    voxel_encoder=dict(
        type='PillarFeatureNet',
        in_channels=6,  # radar points carry 6 values each (matches load_dim/use_dim)
        feat_channels=[64],
        with_distance=False,
        voxel_size=[0.16, 0.16, 4],
        point_cloud_range=[0, -39.68, -2, 69.12, 39.68, 2]),
    middle_encoder=dict(
        type='PointPillarsScatter', in_channels=64, output_shape=[496, 432]),
    backbone=dict(
        type='SECOND',
        in_channels=64,
        layer_nums=[3, 5, 5],
        layer_strides=[2, 2, 2],
        out_channels=[64, 128, 256]),
    neck=dict(
        type='SECONDFPN',
        in_channels=[64, 128, 256],
        upsample_strides=[1, 2, 4],
        out_channels=[128, 128, 128]),
    bbox_head=dict(
        type='Anchor3DHead',
        num_classes=3,
        in_channels=384,  # 3 x 128 concatenated FPN outputs
        feat_channels=384,
        use_direction_classifier=True,
        anchor_generator=dict(
            type='Anchor3DRangeGenerator',
            # One anchor range per class: Pedestrian, Cyclist, Car (same order
            # as class_names below).
            ranges=[[0, -39.68, -0.6, 70.4, 39.68, -0.6],
                    [0, -39.68, -0.6, 70.4, 39.68, -0.6],
                    [0, -39.68, -1.78, 70.4, 39.68, -1.78]],
            sizes=[[0.6, 0.8, 1.73], [0.6, 1.76, 1.73], [1.6, 3.9, 1.56]],
            rotations=[0, 1.57],
            reshape_out=False),
        diff_rad_by_sin=True,
        bbox_coder=dict(type='DeltaXYZWLHRBBoxCoder'),
        loss_cls=dict(
            type='FocalLoss',
            use_sigmoid=True,
            gamma=2.0,
            alpha=0.25,
            loss_weight=1.0),
        loss_bbox=dict(
            type='SmoothL1Loss', beta=0.1111111111111111, loss_weight=2.0),
        loss_dir=dict(
            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)),
    train_cfg=dict(
        # One IoU assigner per class; Car (last) uses stricter thresholds.
        assigner=[
            dict(
                type='MaxIoUAssigner',
                iou_calculator=dict(type='BboxOverlapsNearest3D'),
                pos_iou_thr=0.5,
                neg_iou_thr=0.35,
                min_pos_iou=0.35,
                ignore_iof_thr=-1),
            dict(
                type='MaxIoUAssigner',
                iou_calculator=dict(type='BboxOverlapsNearest3D'),
                pos_iou_thr=0.5,
                neg_iou_thr=0.35,
                min_pos_iou=0.35,
                ignore_iof_thr=-1),
            dict(
                type='MaxIoUAssigner',
                iou_calculator=dict(type='BboxOverlapsNearest3D'),
                pos_iou_thr=0.6,
                neg_iou_thr=0.45,
                min_pos_iou=0.45,
                ignore_iof_thr=-1)
        ],
        allowed_border=0,
        pos_weight=-1,
        debug=False),
    test_cfg=dict(
        use_rotate_nms=True,
        nms_across_levels=False,
        nms_thr=0.01,
        score_thr=0.1,
        min_bbox_size=0,
        nms_pre=100,
        max_num=50))
# Detection range [x_min, y_min, z_min, x_max, y_max, z_max]; duplicated inline
# throughout the pipelines below (mmcv config dumps expand variables).
_point_cloud_range = [0, -39.68, -2, 69.12, 39.68, 2]
dataset_type = 'InhouseDataset'
data_root = 'data/inhouse_radar_only/kitti_format/'
pointcloud_dir = 'radar'
# Radar points are stored with 6 values per point; all 6 are consumed.
load_dim = 6
use_dim = 6
class_names = ['Pedestrian', 'Cyclist', 'Car']
point_cloud_range = [0, -39.68, -2, 69.12, 39.68, 2]
input_modality = dict(use_lidar=True, use_camera=False)
# GT-database sampler for the ObjectSample augmentation; only Car instances are
# pasted (Pedestrian/Cyclist groups are 0).
db_sampler = dict(
    data_root='data/inhouse_radar_only/kitti_format/',
    info_path='data/inhouse_radar_only/kitti_format/inhouse_dbinfos_train.pkl',
    rate=1.0,
    prepare=dict(
        filter_by_difficulty=[-1],
        filter_by_min_points=dict(Car=5, Pedestrian=10, Cyclist=10)),
    classes=['Pedestrian', 'Cyclist', 'Car'],
    sample_groups=dict(Car=15, Pedestrian=0, Cyclist=0),
    points_loader=dict(
        type='LoadPointsFromFile', coord_type='LIDAR', load_dim=6, use_dim=6))
file_client_args = dict(backend='disk')
# Training-time data pipeline: load + GT-paste + noise/flip/rot-scale
# augmentation, then range filtering and tensor formatting.
train_pipeline = [
    dict(type='LoadPointsFromFile', coord_type='LIDAR', load_dim=6, use_dim=6),
    dict(type='LoadAnnotations3D', with_bbox_3d=True, with_label_3d=True),
    dict(
        type='ObjectSample',
        db_sampler=dict(
            data_root='data/inhouse_radar_only/kitti_format/',
            info_path=
            'data/inhouse_radar_only/kitti_format/inhouse_dbinfos_train.pkl',
            rate=1.0,
            prepare=dict(
                filter_by_difficulty=[-1],
                filter_by_min_points=dict(Car=5, Pedestrian=10, Cyclist=10)),
            classes=['Pedestrian', 'Cyclist', 'Car'],
            sample_groups=dict(Car=15, Pedestrian=0, Cyclist=0),
            points_loader=dict(
                type='LoadPointsFromFile',
                coord_type='LIDAR',
                load_dim=6,
                use_dim=6))),
    dict(
        type='ObjectNoise',
        num_try=100,
        translation_std=[0.25, 0.25, 0.25],
        global_rot_range=[0.0, 0.0],
        rot_range=[-0.15707963267, 0.15707963267]),  # +/- pi/20 per object
    dict(type='RandomFlip3D', flip_ratio_bev_horizontal=0.5),
    dict(
        type='GlobalRotScaleTrans',
        rot_range=[-0.78539816, 0.78539816],  # +/- pi/4 global rotation
        scale_ratio_range=[0.95, 1.05]),
    dict(
        type='PointsRangeFilter',
        point_cloud_range=[0, -39.68, -2, 69.12, 39.68, 2]),
    dict(
        type='ObjectRangeFilter',
        point_cloud_range=[0, -39.68, -2, 69.12, 39.68, 2]),
    dict(type='PointShuffle'),
    dict(
        type='DefaultFormatBundle3D',
        class_names=['Pedestrian', 'Cyclist', 'Car']),
    dict(type='Collect3D', keys=['points', 'gt_bboxes_3d', 'gt_labels_3d'])
]
# Inference pipeline: no augmentation (identity rot/scale, flip disabled),
# wrapped in MultiScaleFlipAug3D as mmdet3d requires for test-time code paths.
test_pipeline = [
    dict(type='LoadPointsFromFile', coord_type='LIDAR', load_dim=6, use_dim=6),
    dict(
        type='MultiScaleFlipAug3D',
        img_scale=(1333, 800),  # unused for point-cloud-only input, required by the wrapper
        pts_scale_ratio=1,
        flip=False,
        transforms=[
            dict(
                type='GlobalRotScaleTrans',
                rot_range=[0, 0],
                scale_ratio_range=[1.0, 1.0],
                translation_std=[0, 0, 0]),
            dict(type='RandomFlip3D'),
            dict(
                type='PointsRangeFilter',
                point_cloud_range=[0, -39.68, -2, 69.12, 39.68, 2]),
            dict(
                type='DefaultFormatBundle3D',
                class_names=['Pedestrian', 'Cyclist', 'Car'],
                with_label=False),
            dict(type='Collect3D', keys=['points'])
        ])
]
# Minimal pipeline used by the evaluation hook: load points, format, collect.
eval_pipeline = [
    dict(
        type='LoadPointsFromFile',
        coord_type='LIDAR',
        load_dim=6,
        use_dim=6,
        file_client_args=dict(backend='disk')),
    dict(
        type='DefaultFormatBundle3D',
        class_names=['Pedestrian', 'Cyclist', 'Car'],
        with_label=False),
    dict(type='Collect3D', keys=['points'])
]
# NOTE(review): this top-level train_dataset is not referenced by `data` below
# (data.train redefines its own pipeline) and differs from it in several
# values — point_cloud_range [0, -40, -2, 70.4, 40, 2] vs [0, -39.68, ...],
# ObjectNoise translation_std/rot_range, and sample_groups Car=12 vs 15.
# Presumably a leftover from an earlier config — verify before reuse.
train_dataset = dict(
    type='InhouseDataset',
    data_root='data/inhouse_radar_only/kitti_format/',
    ann_file='data/inhouse_radar_only/kitti_format/inhouse_infos_train.pkl',
    split='training',
    pts_prefix='radar',
    pipeline=[
        dict(
            type='LoadPointsFromFile',
            coord_type='LIDAR',
            load_dim=6,
            use_dim=6,
            file_client_args=dict(backend='disk')),
        dict(
            type='LoadAnnotations3D',
            with_bbox_3d=True,
            with_label_3d=True,
            file_client_args=dict(backend='disk')),
        dict(
            type='ObjectSample',
            db_sampler=dict(
                data_root='data/inhouse_radar_only/kitti_format/',
                info_path=
                'data/inhouse_radar_only/kitti_format/inhouse_dbinfos_train.pkl',
                rate=1.0,
                prepare=dict(
                    filter_by_difficulty=[-1],
                    filter_by_min_points=dict(
                        Car=5, Pedestrian=10, Cyclist=10)),
                classes=['Pedestrian', 'Cyclist', 'Car'],
                sample_groups=dict(Car=12, Pedestrian=0, Cyclist=0),
                points_loader=dict(
                    type='LoadPointsFromFile',
                    coord_type='LIDAR',
                    load_dim=6,
                    use_dim=6))),
        dict(
            type='ObjectNoise',
            num_try=100,
            translation_std=[1.0, 1.0, 0.5],
            global_rot_range=[0.0, 0.0],
            rot_range=[-0.78539816, 0.78539816]),
        dict(type='RandomFlip3D', flip_ratio_bev_horizontal=0.5),
        dict(
            type='GlobalRotScaleTrans',
            rot_range=[-0.78539816, 0.78539816],
            scale_ratio_range=[0.95, 1.05]),
        dict(
            type='PointsRangeFilter',
            point_cloud_range=[0, -40, -2, 70.4, 40, 2]),
        dict(
            type='ObjectRangeFilter',
            point_cloud_range=[0, -40, -2, 70.4, 40, 2]),
        dict(type='PointShuffle'),
        dict(
            type='DefaultFormatBundle3D',
            class_names=['Pedestrian', 'Cyclist', 'Car']),
        dict(
            type='Collect3D', keys=['points', 'gt_bboxes_3d', 'gt_labels_3d'])
    ],
    modality=dict(use_lidar=True, use_camera=False),
    classes=['Pedestrian', 'Cyclist', 'Car'],
    test_mode=False,
    box_type_3d='LiDAR')
# Dataloader configuration. val and test both point at the *_val.pkl split and
# use the augmentation-free test pipeline.
data = dict(
    samples_per_gpu=11,  # per-GPU batch size
    workers_per_gpu=4,
    train=dict(
        type='InhouseDataset',
        data_root='data/inhouse_radar_only/kitti_format/',
        ann_file='data/inhouse_radar_only/kitti_format/inhouse_infos_train.pkl',
        split='training',
        pts_prefix='radar',
        pipeline=[
            dict(
                type='LoadPointsFromFile',
                coord_type='LIDAR',
                load_dim=6,
                use_dim=6),
            dict(
                type='LoadAnnotations3D',
                with_bbox_3d=True,
                with_label_3d=True),
            dict(
                type='ObjectSample',
                db_sampler=dict(
                    data_root='data/inhouse_radar_only/kitti_format/',
                    info_path=
                    'data/inhouse_radar_only/kitti_format/inhouse_dbinfos_train.pkl',
                    rate=1.0,
                    prepare=dict(
                        filter_by_difficulty=[-1],
                        filter_by_min_points=dict(
                            Car=5, Pedestrian=10, Cyclist=10)),
                    classes=['Pedestrian', 'Cyclist', 'Car'],
                    sample_groups=dict(Car=15, Pedestrian=0, Cyclist=0),
                    points_loader=dict(
                        type='LoadPointsFromFile',
                        coord_type='LIDAR',
                        load_dim=6,
                        use_dim=6))),
            dict(
                type='ObjectNoise',
                num_try=100,
                translation_std=[0.25, 0.25, 0.25],
                global_rot_range=[0.0, 0.0],
                rot_range=[-0.15707963267, 0.15707963267]),
            dict(type='RandomFlip3D', flip_ratio_bev_horizontal=0.5),
            dict(
                type='GlobalRotScaleTrans',
                rot_range=[-0.78539816, 0.78539816],
                scale_ratio_range=[0.95, 1.05]),
            dict(
                type='PointsRangeFilter',
                point_cloud_range=[0, -39.68, -2, 69.12, 39.68, 2]),
            dict(
                type='ObjectRangeFilter',
                point_cloud_range=[0, -39.68, -2, 69.12, 39.68, 2]),
            dict(type='PointShuffle'),
            dict(
                type='DefaultFormatBundle3D',
                class_names=['Pedestrian', 'Cyclist', 'Car']),
            dict(
                type='Collect3D',
                keys=['points', 'gt_bboxes_3d', 'gt_labels_3d'])
        ],
        modality=dict(use_lidar=True, use_camera=False),
        classes=['Pedestrian', 'Cyclist', 'Car'],
        test_mode=False,
        box_type_3d='LiDAR'),
    val=dict(
        type='InhouseDataset',
        data_root='data/inhouse_radar_only/kitti_format/',
        ann_file='data/inhouse_radar_only/kitti_format/inhouse_infos_val.pkl',
        split='training',
        pts_prefix='radar',
        pipeline=[
            dict(
                type='LoadPointsFromFile',
                coord_type='LIDAR',
                load_dim=6,
                use_dim=6),
            dict(
                type='MultiScaleFlipAug3D',
                img_scale=(1333, 800),
                pts_scale_ratio=1,
                flip=False,
                transforms=[
                    dict(
                        type='GlobalRotScaleTrans',
                        rot_range=[0, 0],
                        scale_ratio_range=[1.0, 1.0],
                        translation_std=[0, 0, 0]),
                    dict(type='RandomFlip3D'),
                    dict(
                        type='PointsRangeFilter',
                        point_cloud_range=[0, -39.68, -2, 69.12, 39.68, 2]),
                    dict(
                        type='DefaultFormatBundle3D',
                        class_names=['Pedestrian', 'Cyclist', 'Car'],
                        with_label=False),
                    dict(type='Collect3D', keys=['points'])
                ])
        ],
        modality=dict(use_lidar=True, use_camera=False),
        classes=['Pedestrian', 'Cyclist', 'Car'],
        test_mode=True,
        box_type_3d='LiDAR'),
    test=dict(
        type='InhouseDataset',
        data_root='data/inhouse_radar_only/kitti_format/',
        ann_file='data/inhouse_radar_only/kitti_format/inhouse_infos_val.pkl',
        split='training',
        pts_prefix='radar',
        pipeline=[
            dict(
                type='LoadPointsFromFile',
                coord_type='LIDAR',
                load_dim=6,
                use_dim=6),
            dict(
                type='MultiScaleFlipAug3D',
                img_scale=(1333, 800),
                pts_scale_ratio=1,
                flip=False,
                transforms=[
                    dict(
                        type='GlobalRotScaleTrans',
                        rot_range=[0, 0],
                        scale_ratio_range=[1.0, 1.0],
                        translation_std=[0, 0, 0]),
                    dict(type='RandomFlip3D'),
                    dict(
                        type='PointsRangeFilter',
                        point_cloud_range=[0, -39.68, -2, 69.12, 39.68, 2]),
                    dict(
                        type='DefaultFormatBundle3D',
                        class_names=['Pedestrian', 'Cyclist', 'Car'],
                        with_label=False),
                    dict(type='Collect3D', keys=['points'])
                ])
        ],
        modality=dict(use_lidar=True, use_camera=False),
        classes=['Pedestrian', 'Cyclist', 'Car'],
        test_mode=True,
        box_type_3d='LiDAR'))
# Run evaluation every 4 epochs with the minimal point-loading pipeline.
evaluation = dict(
    interval=4,
    pipeline=[
        dict(
            type='LoadPointsFromFile',
            coord_type='LIDAR',
            load_dim=6,
            use_dim=6,
            file_client_args=dict(backend='disk')),
        dict(
            type='DefaultFormatBundle3D',
            class_names=['Pedestrian', 'Cyclist', 'Car'],
            with_label=False),
        dict(type='Collect3D', keys=['points'])
    ])
# Optimization: AdamW with cyclic one-cycle LR/momentum schedules
# (standard PointPillars recipe).
lr = 0.001
optimizer = dict(type='AdamW', lr=0.001, betas=(0.95, 0.99), weight_decay=0.01)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
lr_config = dict(
    policy='cyclic',
    target_ratio=(10, 0.0001),  # peak = 10x base lr, final = 1e-4x
    cyclic_times=1,
    step_ratio_up=0.4)
momentum_config = dict(
    policy='cyclic',
    target_ratio=(0.8947368421052632, 1),  # 0.85/0.95 momentum ratio
    cyclic_times=1,
    step_ratio_up=0.4)
# Runtime settings: 160-epoch training, per-epoch checkpoints, text +
# TensorBoard logging every 50 iterations.
runner = dict(type='EpochBasedRunner', max_epochs=160)
checkpoint_config = dict(interval=1)
log_config = dict(
    interval=50,
    hooks=[dict(type='TextLoggerHook'),
           dict(type='TensorboardLoggerHook')])
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = 'radar_exp'
load_from = None   # no pretrained checkpoint
resume_from = None
workflow = [('train', 1)]
gpu_ids = [0]
| 36.3379
| 85
| 0.525383
| 1,764
| 15,916
| 4.498299
| 0.148526
| 0.089729
| 0.013863
| 0.042848
| 0.806427
| 0.802647
| 0.790296
| 0.784247
| 0.76736
| 0.76736
| 0
| 0.080401
| 0.341229
| 15,916
| 437
| 86
| 36.421053
| 0.676395
| 0
| 0
| 0.709382
| 0
| 0
| 0.179505
| 0.069867
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe2851c5b0fc03d68e53503c16d1d0cc09c93984
| 98
|
py
|
Python
|
acmedns/__init__.py
|
wayoos/acme-dns
|
e29a16c565b353d18a8b48d691fbe8c8ea6f375a
|
[
"MIT"
] | 2
|
2016-09-29T18:59:24.000Z
|
2017-10-09T17:13:01.000Z
|
acmedns/__init__.py
|
wayoos/acme-dns
|
e29a16c565b353d18a8b48d691fbe8c8ea6f375a
|
[
"MIT"
] | null | null | null |
acmedns/__init__.py
|
wayoos/acme-dns
|
e29a16c565b353d18a8b48d691fbe8c8ea6f375a
|
[
"MIT"
] | null | null | null |
#from config import ConfigurationManager
from client import ClientConfig
from client import Client
| 32.666667
| 40
| 0.877551
| 12
| 98
| 7.166667
| 0.5
| 0.232558
| 0.372093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112245
| 98
| 3
| 41
| 32.666667
| 0.988506
| 0.397959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fe31f1f542fe71bd31a90216bd332737ef05181d
| 384
|
py
|
Python
|
flask-portal/tests/test_vacancy.py
|
TUMTICS-Dev/Talent-Pool
|
c51376453184e7a83d684ab5f9467535d49cc784
|
[
"MIT"
] | null | null | null |
flask-portal/tests/test_vacancy.py
|
TUMTICS-Dev/Talent-Pool
|
c51376453184e7a83d684ab5f9467535d49cc784
|
[
"MIT"
] | null | null | null |
flask-portal/tests/test_vacancy.py
|
TUMTICS-Dev/Talent-Pool
|
c51376453184e7a83d684ab5f9467535d49cc784
|
[
"MIT"
] | null | null | null |
import unittest
class Test_Vacancy(unittest.TestCase):
    """Placeholder test suite for the Vacancy model; every case is still TODO."""

    def test_all(self):
        """TODO: exercise fetching all vacancies."""
        pass

    def test_delete(self):
        """TODO: exercise deleting a vacancy."""
        pass

    def test_exists(self):
        """TODO: exercise the existence check."""
        pass

    def test_fetch(self):
        """TODO: exercise fetching a single vacancy."""
        pass

    def test_first(self):
        """TODO: exercise fetching the first vacancy."""
        pass

    def test_save(self):
        """TODO: exercise saving a vacancy."""
        pass
| 14.222222
| 38
| 0.510417
| 43
| 384
| 4.395349
| 0.372093
| 0.222222
| 0.380952
| 0.396825
| 0.502646
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 384
| 27
| 39
| 14.222222
| 0.84375
| 0.075521
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0
| 1
| 0.428571
| false
| 0.428571
| 0.071429
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
fe5899192c186935560d0bfa6dee9961496db839
| 342
|
py
|
Python
|
build/lib/YouTubeCommentAnalysis/__init__.py
|
nguyenha1910/YouTubeCommentAnalysis
|
2bf454e034bbef7c032a4e28f5caa2877ca51836
|
[
"MIT"
] | null | null | null |
build/lib/YouTubeCommentAnalysis/__init__.py
|
nguyenha1910/YouTubeCommentAnalysis
|
2bf454e034bbef7c032a4e28f5caa2877ca51836
|
[
"MIT"
] | null | null | null |
build/lib/YouTubeCommentAnalysis/__init__.py
|
nguyenha1910/YouTubeCommentAnalysis
|
2bf454e034bbef7c032a4e28f5caa2877ca51836
|
[
"MIT"
] | null | null | null |
from YouTubeCommentAnalysis import FileExIm
from YouTubeCommentAnalysis import TextProcessing
from YouTubeCommentAnalysis import CommentCorpus
from YouTubeCommentAnalysis import EmotionAnalysis
from YouTubeCommentAnalysis import WordAssociation
# Public API re-exported by the package (matches the submodule imports above).
__all__ = ['FileExIm', 'TextProcessing', 'CommentCorpus', 'EmotionAnalysis', 'WordAssociation']
| 42.75
| 95
| 0.868421
| 26
| 342
| 11.269231
| 0.346154
| 0.443686
| 0.546075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081871
| 342
| 7
| 96
| 48.857143
| 0.933121
| 0
| 0
| 0
| 0
| 0
| 0.190058
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.833333
| 0
| 0.833333
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fe58ccc62dba5399d17e956c5730f300d9648d36
| 7,923
|
py
|
Python
|
tests/test_drw_dataset_s82.py
|
jiwoncpark/lens-classification
|
c1faf4dbbd4a16f2df74a34fd593ec7128750252
|
[
"MIT"
] | null | null | null |
tests/test_drw_dataset_s82.py
|
jiwoncpark/lens-classification
|
c1faf4dbbd4a16f2df74a34fd593ec7128750252
|
[
"MIT"
] | 21
|
2018-05-29T20:13:11.000Z
|
2018-07-13T02:32:35.000Z
|
tests/test_drw_dataset_s82.py
|
jiwoncpark/lens-classification
|
c1faf4dbbd4a16f2df74a34fd593ec7128750252
|
[
"MIT"
] | null | null | null |
import os
import unittest
import shutil
import numpy as np
from magnificat.drw_dataset import DRWDataset
from magnificat.samplers.s82_sampler import S82Sampler
class TestDRWDatasetS82S82Sampler(unittest.TestCase):
    """Tests for DRWDataset driven by the S82 (Stripe 82) sampler.

    Defect fixed: the sampler/obs/dataset construction block was copy-pasted
    four times across the test methods; it is now a single `_make_dataset`
    helper, so the fixture stays consistent everywhere.
    """

    def setUp(self):
        # Scratch directories used by the helpers below; removed in tearDown.
        self.out_dir = 'drw_data_s82_testing'
        self.obs_dir = 'obs_testing'
        self.sampler_dir = 's82_sampler_testing'
        os.makedirs(self.out_dir, exist_ok=True)

    def _make_dataset(self, agn_params, bandpasses):
        """Build a small deterministic DRWDataset (seed=123, 2 samples).

        Parameters
        ----------
        agn_params : list of str
            AGN parameter names the sampler should expose.
        bandpasses : list of str
            Bandpass letters used for the sampler, observation kwargs and
            prestored bandpasses alike.
        """
        sampler = S82Sampler(agn_params=agn_params,
                             bp_params=['log_rf_tau', 'log_sf_inf'],
                             bandpasses=bandpasses,
                             out_dir=self.sampler_dir,
                             seed=123)
        sampler.process_metadata()
        sampler.idx = [0, 1]
        obs_kwargs = dict(n_pointings_init=3,
                          obs_dir=self.obs_dir,
                          bandpasses=bandpasses)
        return DRWDataset(sampler,
                          self.out_dir,
                          num_samples=2,
                          is_training=True,
                          transform_x_func=lambda x: x,
                          transform_y_func=lambda x: x,
                          prestored_bandpasses=bandpasses,
                          seed=123,
                          obs_kwargs=obs_kwargs)

    def test_constructor(self):
        """Dataset construction succeeds for the 5-band (ugriz) setup."""
        agn_params = ['BH_mass', 'redshift', 'M_i', 'u', 'g', 'r', 'i', 'z']
        self._make_dataset(agn_params, list('ugriz'))

    def test_seeding(self):
        """Two datasets built with the same seed agree on pointing count."""
        agn_params = ['BH_mass', 'redshift', 'M_i', 'u', 'g', 'r', 'i', 'z']
        # Run 0
        drw_dataset = self._make_dataset(agn_params, list('ugriz'))
        n_pointings_run0 = drw_dataset.cadence_obj.n_pointings
        # Run 1
        drw_dataset = self._make_dataset(agn_params, list('ugriz'))
        n_pointings_run1 = drw_dataset.cadence_obj.n_pointings
        np.testing.assert_equal(n_pointings_run0, n_pointings_run1)

    def test_getitem(self):
        """`__getitem__` returns consistently shaped arrays for 5 bands."""
        agn_params = ['BH_mass', 'redshift', 'M_i', 'u', 'g', 'r', 'i', 'z']
        drw_dataset = self._make_dataset(agn_params, list('ugriz'))
        data = drw_dataset[0]
        # bandpasses
        np.testing.assert_array_equal(drw_dataset.bandpasses,
                                      list('ugriz'))
        np.testing.assert_array_equal(drw_dataset.bandpasses_int,
                                      [0, 1, 2, 3, 4])
        # x
        assert len(data['x']) == drw_dataset.trimmed_T
        # y
        np.testing.assert_array_equal(data['y'].shape,
                                      [drw_dataset.trimmed_T, 5])
        assert not (data['y'] < -50).any()  # can't be -99
        # param
        assert len(data['params']) == len(drw_dataset.param_names)
        # trimmed_mask
        np.testing.assert_array_equal(data['trimmed_mask'].shape,
                                      [drw_dataset.trimmed_T, 5])

    def test_getitem_singleband(self):
        """`__getitem__` with a single band ('i' only)."""
        agn_params = ['BH_mass', 'redshift', 'M_i', 'i']
        drw_dataset = self._make_dataset(agn_params, list('i'))
        data = drw_dataset[0]
        # bandpasses
        np.testing.assert_array_equal(drw_dataset.bandpasses,
                                      list('i'))
        np.testing.assert_array_equal(drw_dataset.bandpasses_int,
                                      [3])
        # x
        assert len(data['x']) == drw_dataset.trimmed_T
        # y
        np.testing.assert_array_equal(data['y'].shape,
                                      [drw_dataset.trimmed_T, 1])
        assert not (data['y'] < -50).any()  # can't be -99
        # param
        assert len(data['params']) == len(drw_dataset.param_names)
        # trimmed_mask
        np.testing.assert_array_equal(data['trimmed_mask'].shape,
                                      [drw_dataset.trimmed_T, 1])

    def tearDown(self):
        # Remove every scratch directory created by the fixtures.
        shutil.rmtree(self.out_dir)
        shutil.rmtree(self.obs_dir)
        shutil.rmtree(self.sampler_dir)
# Allow running this test module directly (python tests/test_drw_dataset_s82.py).
if __name__ == '__main__':
    unittest.main()
| 43.532967
| 76
| 0.462072
| 781
| 7,923
| 4.377721
| 0.152369
| 0.064346
| 0.072243
| 0.035098
| 0.808131
| 0.808131
| 0.789412
| 0.789412
| 0.789412
| 0.761334
| 0
| 0.022177
| 0.442257
| 7,923
| 181
| 77
| 43.773481
| 0.751527
| 0.036224
| 0
| 0.778523
| 0
| 0
| 0.05002
| 0
| 0
| 0
| 0
| 0
| 0.100671
| 1
| 0.040268
| false
| 0.127517
| 0.040268
| 0
| 0.087248
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
fe715953ced4eae0d6dec313bca175659f98b7b6
| 185
|
py
|
Python
|
WorkingWithStrings/Justify.py
|
zac11/AutomateThingsWithPython
|
91289700781ab5509fdb0721e742bc57d465cc87
|
[
"MIT"
] | null | null | null |
WorkingWithStrings/Justify.py
|
zac11/AutomateThingsWithPython
|
91289700781ab5509fdb0721e742bc57d465cc87
|
[
"MIT"
] | null | null | null |
WorkingWithStrings/Justify.py
|
zac11/AutomateThingsWithPython
|
91289700781ab5509fdb0721e742bc57d465cc87
|
[
"MIT"
] | null | null | null |
# Demonstrate str justification helpers: ljust/rjust/center, with and
# without an explicit fill character. Output is identical to the original
# six print() calls, in the same order.
String1 = "Big Bang"
for justified in (String1.ljust(20),
                  String1.rjust(20),
                  String1.ljust(20, '*'),
                  String1.rjust(20, "+"),
                  String1.center(20),
                  String1.center(20, "=")):
    print(justified)
| 18.5
| 29
| 0.702703
| 27
| 185
| 4.814815
| 0.296296
| 0.553846
| 0.538462
| 0.292308
| 0.892308
| 0.676923
| 0.676923
| 0.676923
| 0.676923
| 0.676923
| 0
| 0.108571
| 0.054054
| 185
| 10
| 29
| 18.5
| 0.634286
| 0
| 0
| 0
| 0
| 0
| 0.05914
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.857143
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
fe87aa147a9dda6d13a093e8a961c8ee2edc1398
| 100
|
py
|
Python
|
lzo_indexer/__init__.py
|
Orhideous/python3_lzo_indexer
|
aaa6498bb5d3464c36e6103599035028411fb950
|
[
"Apache-2.0"
] | 1
|
2020-06-24T13:59:40.000Z
|
2020-06-24T13:59:40.000Z
|
lzo_indexer/__init__.py
|
Orhideous/python3_lzo_indexer
|
aaa6498bb5d3464c36e6103599035028411fb950
|
[
"Apache-2.0"
] | 49
|
2018-09-29T23:42:51.000Z
|
2021-12-01T18:29:12.000Z
|
lzo_indexer/__init__.py
|
Orhideous/python3_lzo_indexer
|
aaa6498bb5d3464c36e6103599035028411fb950
|
[
"Apache-2.0"
] | null | null | null |
from .indexer import get_lzo_blocks # noqa: F401
from .indexer import index_lzo_file # noqa: F401
| 33.333333
| 49
| 0.78
| 16
| 100
| 4.625
| 0.625
| 0.297297
| 0.459459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0.16
| 100
| 2
| 50
| 50
| 0.809524
| 0.21
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fea8e2eb70d99a91b45ed1fc55252d0dc418cdad
| 98
|
py
|
Python
|
core/datasets/__init__.py
|
swipswaps/retinal_oct
|
a99f93d88833fc328b9b7f6aaabe1310632c644b
|
[
"MIT"
] | 15
|
2021-01-29T17:05:38.000Z
|
2022-03-16T17:47:42.000Z
|
core/datasets/__init__.py
|
solomonkimunyu/retinal_oct
|
a99f93d88833fc328b9b7f6aaabe1310632c644b
|
[
"MIT"
] | null | null | null |
core/datasets/__init__.py
|
solomonkimunyu/retinal_oct
|
a99f93d88833fc328b9b7f6aaabe1310632c644b
|
[
"MIT"
] | 14
|
2021-03-03T03:16:31.000Z
|
2022-03-23T19:23:42.000Z
|
from .retina_dataset import RetinaDataset
from .retina_dataset_wrapper import RetinaDatasetWrapper
| 49
| 56
| 0.908163
| 11
| 98
| 7.818182
| 0.636364
| 0.232558
| 0.395349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 98
| 2
| 56
| 49
| 0.945055
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2290e83ec33d50f62bfde62648843c3e425ff334
| 107
|
py
|
Python
|
models/event_argument_extraction/__init__.py
|
semantic-systems/coypu-EventExtraction
|
be1b6a376c05f333dc97eaabc827e10c418d1582
|
[
"MIT"
] | null | null | null |
models/event_argument_extraction/__init__.py
|
semantic-systems/coypu-EventExtraction
|
be1b6a376c05f333dc97eaabc827e10c418d1582
|
[
"MIT"
] | null | null | null |
models/event_argument_extraction/__init__.py
|
semantic-systems/coypu-EventExtraction
|
be1b6a376c05f333dc97eaabc827e10c418d1582
|
[
"MIT"
] | null | null | null |
from models.event_argument_extraction.EventArgumentExtractor import OpenIEExtractor, EventArgumentExtractor
| 107
| 107
| 0.934579
| 9
| 107
| 10.888889
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037383
| 107
| 1
| 107
| 107
| 0.951456
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
22d839a3e700b32ca4c7dca53ea588ab503593bc
| 21,973
|
py
|
Python
|
src/user_gram.py
|
mcw519/Browine
|
62c979e8c7c858ab5342f0dc87e623e2594d3cbb
|
[
"Apache-2.0"
] | 1
|
2022-03-01T11:15:44.000Z
|
2022-03-01T11:15:44.000Z
|
src/user_gram.py
|
mcw519/Browine
|
62c979e8c7c858ab5342f0dc87e623e2594d3cbb
|
[
"Apache-2.0"
] | null | null | null |
src/user_gram.py
|
mcw519/Browine
|
62c979e8c7c858ab5342f0dc87e623e2594d3cbb
|
[
"Apache-2.0"
] | 1
|
2021-06-24T06:12:30.000Z
|
2021-06-24T06:12:30.000Z
|
# Copyright 2020 (author: Meng Wu)
import pynini
import itertools
import re
from .utils import DataIO, lex_add_disambig
from .tokenizer import Tokenizer
from .common import fst_to_linear_sequence
class GrammarHelper():
def __init__(self, words, phones, jieba_lex=None):
    """Load word/phone symbol tables and set up a jieba-backed tokenizer.

    Args:
        words: path to a Kaldi-style words.txt symbol table (see load_symbols).
        phones: path to a Kaldi-style phones.txt symbol table.
        jieba_lex: optional custom jieba dictionary path for the tokenizer.
    """
    # initial basic English, Punctuation and special symbol char in sigma_star, ASCII
    # chr(91)-chr(93) ('[', '\', ']') are inserted escaped — presumably so they
    # are safe inside pynini/regex contexts (TODO confirm).
    self.chars = [ chr(i) for i in range(1, 91) ] + [ r"\[", r"\\", r"\]" ] + [ chr(i) for i in range(94, 256) ]
    self.data_io = DataIO()
    self.word_tb = self.load_symbols(words)
    self.phone_tb = self.load_symbols(phones)
    # init tokenizer
    if jieba_lex is not None:
        self.tokenizer = Tokenizer(backend="jieba", jieba_dict=jieba_lex)
    else:
        self.tokenizer = Tokenizer(backend="jieba")
def load_symbols(self, symbol_table):
    '''
    Read a symbol table file into a dict keyed by symbol string.

    It's not a real (OpenFst) symbol table: the file is assumed to have the
    same form as Kaldi's words.txt / phones.txt, and its text entries become
    dict keys.
    '''
    return self.data_io.read_word_table(symbol_table)
def pair2fst(self, x, in_syms=None, out_syms=None, weight=None):
    '''
    Build a linear (chain) FST mapping the tokens of x[0] to the tokens of x[1].

    Input:
        x: pair with dim 1 x 2, means input/output pair
        in_syms: symbol table (dict), token -> integer id; when given, input
            tokens are mapped through it (missing None slots become <eps>)
        out_syms: symbol table (dict), same for output tokens
        weight: optional tropical weight put on every arc (defaults to 0)
    Return:
        pynini.Fst object
    '''
    tokenizer = self.tokenizer
    # english split by space
    if x[0].isascii():
        _in = x[0].strip().split(" ")
    else:
        # non-ASCII text (e.g. Chinese) is word-segmented by jieba first
        _in = tokenizer.segment(x[0]).split(" ")
    if x[1].isascii():
        _out = x[1].strip().split(" ")
    else:
        _out = tokenizer.segment(x[1]).split(" ")
    _temp = [_in, _out]
    _fst = pynini.Fst()
    if weight is not None:
        _arc_weight = pynini.Weight("tropical", weight)
    else:
        _arc_weight = pynini.Weight("tropical", 0)
    _fst.add_state() # 0-state
    cur_state = 0 # start from 0-based
    # zip_longest pads the shorter token list with None; the None side is
    # mapped to <eps> below so input/output lengths may differ.
    for pair in list(itertools.zip_longest(*_temp)):
        _arc_in, _arc_out = pair
        if in_syms is not None:
            try:
                _arc_in = int(in_syms[_arc_in])
            # NOTE(review): bare except also masks unrelated errors; ideally
            # this would catch KeyError only.
            except:
                if _arc_in is None:
                    _arc_in = int(in_syms["<eps>"])
                else:
                    raise ValueError("symbol not in symbol table")
        if out_syms is not None:
            try:
                _arc_out = int(out_syms[_arc_out])
            except:
                if _arc_out is None:
                    _arc_out = int(out_syms["<eps>"])
                else:
                    raise ValueError("symbol not in symbol table")
        _fst.add_state()
        # one arc per token pair, advancing a straight chain of states
        _fst.add_arc(cur_state, pynini.Arc(int(_arc_in), int(_arc_out), _arc_weight, cur_state + 1))
        cur_state += 1
    _fst.set_start(0)
    _fst.set_final(cur_state)
    return _fst
def load_kaldi_lex_as_lfst(self, kaldi_lex, add_disambig=False, add_position=False, add_opt_sil="SIL", invert=False):
    """
    Create a lexicon FST (L) with phones on the input side and words on the
    output side, from a Kaldi-format lexicon file.

    Args:
        kaldi_lex: path to a Kaldi lexicon ("word phone1 phone2 ...").
        add_disambig: add #N disambiguation symbols (via lex_add_disambig).
        add_position: append Kaldi position suffixes (_S/_B/_I/_E) to phones.
        add_opt_sil: when truthy together with add_disambig, add the optional
            silence path (SIL + the extra #ndis+1 disambiguation symbol).
        invert: return the optimized-then-inverted FST instead of the
            optimized one.

    Fixes over the previous revision:
      * `_has_disambig` is now initialized unconditionally — previously it was
        only set when add_disambig was True, so add_position=True with
        add_disambig=False raised NameError at `if _has_disambig:`.
      * The word-final position suffix used `elif i == len(_ph_seq)`, which
        `enumerate` can never reach, so the last phone was tagged `_I` instead
        of Kaldi's word-final `_E`; the comparison is now `len(_ph_seq) - 1`.
    """
    _opt_sil_state = ""
    _has_disambig = False  # initial tag; tracks a pending #N suffix per entry
    if add_disambig:
        _lex, ndis = lex_add_disambig(kaldi_lex)
    else:
        _lex = self.data_io.read_file_to_list(kaldi_lex)
    in_syms = self.phone_tb
    out_syms = self.word_tb
    _arc_weight = pynini.Weight("tropical", 0)
    _lfst = pynini.Fst()
    _lfst.add_state() # 0-state
    cur_state = 0 # start from 0-based
    for idx, line in enumerate(_lex):
        if add_disambig:
            line = line.strip().split()
        if idx == 0:
            # Shared entry state 1: <eps>:<eps> arc from the start state,
            # and every word loops back to it after emission.
            _arc_in = int(in_syms["<eps>"])
            _arc_out = int(out_syms["<eps>"])
            _lfst.add_state()
            _lfst.add_arc(0, pynini.Arc(_arc_in, _arc_out, _arc_weight, 1))
            _lfst.set_final(1)
            cur_state = 1
        _wd = line[0] # string
        _ph_seq = line[1:] # list
        if add_position:
            # Temporarily strip a trailing #N disambig symbol so position
            # suffixes only land on real phones.
            if add_disambig and "#" in " ".join(_ph_seq):
                _dis_symb = _ph_seq[-1]
                _ph_seq = _ph_seq[:-1]
                _has_disambig = True
            if len(_ph_seq) == 1:
                _ph_seq = [ _ph_seq[0] + "_S" ]  # singleton phone
            else:
                for i, j in enumerate(_ph_seq):
                    if i == 0:
                        _ph_seq[i] = j + "_B"  # word-begin
                    elif i == len(_ph_seq) - 1:
                        _ph_seq[i] = j + "_E"  # word-end (was unreachable before)
                    else:
                        _ph_seq[i] = j + "_I"  # word-internal
            if _has_disambig:
                _ph_seq.append(_dis_symb)
                _has_disambig = False
        _temp = [ _ph_seq, _wd.split(" ") ]
        # Chain of arcs: phone sequence in, the word (padded with <eps>) out.
        for sub_idx, pair in enumerate(list(itertools.zip_longest(*_temp))):
            _arc_in, _arc_out = pair
            try:
                _arc_in = int(in_syms[_arc_in])
            except:
                if _arc_in is None:
                    _arc_in = int(in_syms["<eps>"])
                else:
                    raise ValueError(_arc_in, "this symbol not in input symbol table")
            try:
                _arc_out = int(out_syms[_arc_out])
            except:
                if _arc_out is None:
                    _arc_out = int(out_syms["<eps>"])
                else:
                    raise ValueError(_arc_out, "this symbol not in output symbol table")
            _lfst.add_state()
            if sub_idx == 0:
                # each word start from state 1
                _lfst.add_arc(1, pynini.Arc(_arc_in, _arc_out, _arc_weight, cur_state + 1))
            else:
                _lfst.add_arc(cur_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, cur_state + 1))
            cur_state += 1
        # return to state 1
        _arc_in = int(in_syms["<eps>"])
        _arc_out = int(out_syms["<eps>"])
        _lfst.add_arc(cur_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, 1))
        if add_disambig and add_opt_sil:
            if _opt_sil_state == "":
                # First word: create the optional-silence path once
                # (SIL then the extra #ndis+1 disambiguation symbol).
                _lfst.add_states(2)
                _arc_in = int(in_syms["SIL"])
                _arc_out = int(out_syms["<eps>"])
                _lfst.add_arc(cur_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, cur_state + 1))
                _opt_sil_state = cur_state
                cur_state += 1
                _arc_in = int(in_syms["#" + str(ndis + 1)])
                # return to state 1
                _lfst.add_arc(cur_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, 1))
                cur_state += 1
            else:
                # Subsequent words reuse the existing silence states.
                _arc_in = int(in_syms["SIL"])
                _arc_out = int(out_syms["<eps>"])
                _lfst.add_arc(_opt_sil_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, _opt_sil_state + 1))
                _arc_in = int(in_syms["#" + str(ndis + 1)])
                # return to state 1
                _lfst.add_arc(_opt_sil_state + 1, pynini.Arc(_arc_in, _arc_out, _arc_weight, 1))
    _lfst.set_start(0)
    if invert:
        _lfst.optimize()
        _lfst.invert()
        return _lfst
    else:
        return _lfst.optimize()
def load_kaldi_lex_as_lfst_reverse(self, kaldi_lex, add_disambig=False, add_position=False, add_opt_sil="SIL"):
    """
    Create a lexicon FST with word-input / phoneme-output arcs (the reverse
    orientation of ``load_kaldi_lex_as_lfst``).

    :param kaldi_lex: path to a Kaldi-style lexicon file (``word ph1 ph2 ...``).
    :param add_disambig: add disambiguation symbols (``#1``, ...) via
        ``lex_add_disambig`` before building the FST.
    :param add_position: append Kaldi word-position suffixes (_B/_I/_E/_S)
        to the phones of each entry.
    :param add_opt_sil: optional-silence phone label; only wired in when
        ``add_disambig`` is also set.
    :return: the optimized lexicon ``pynini.Fst``.

    NOTE(review): the optional-silence branch re-adds identical arcs for every
    word after the first; ``optimize()`` dedupes them, but the correctness of
    the silence path has not been fully verified (as the original author noted).
    """
    _opt_sil_state = ""  # sentinel: optional-silence states not created yet
    _has_disambig = False
    # BUG FIX: _has_disambig is now initialized unconditionally; the original
    # only set it under `add_disambig`, so `add_position=True` together with
    # `add_disambig=False` raised NameError at the `if _has_disambig:` check.
    if add_disambig:
        _lex, ndis = lex_add_disambig(kaldi_lex)
    else:
        _lex = self.data_io.read_file_to_list(kaldi_lex)
    in_syms = self.word_tb
    out_syms = self.phone_tb
    _arc_weight = pynini.Weight("tropical", 0)
    _lfst = pynini.Fst()
    _lfst.add_state()  # 0-state
    cur_state = 0  # start from 0-based
    for idx, line in enumerate(_lex):
        if add_disambig:
            # lex_add_disambig yields raw text lines; tokenize them here.
            line = line.strip().split()
        if idx == 0:
            # State 1 is the shared hub every word leaves from and returns to.
            _arc_in = int(in_syms["<eps>"])
            _arc_out = int(out_syms["<eps>"])
            _lfst.add_state()
            _lfst.add_arc(0, pynini.Arc(_arc_in, _arc_out, _arc_weight, 1))
            _lfst.set_final(1)
            cur_state = 1
        _wd = line[0]     # word string
        _ph_seq = line[1:]  # list of phone tokens
        if add_position:
            if add_disambig and "#" in " ".join(_ph_seq):
                # Strip the trailing disambiguation symbol before tagging,
                # re-append it afterwards.
                _dis_symb = _ph_seq[-1]
                _ph_seq = _ph_seq[:-1]
                _has_disambig = True
            if len(_ph_seq) == 1:
                _ph_seq = [_ph_seq[0] + "_S"]
            else:
                last = len(_ph_seq) - 1
                for i, ph in enumerate(_ph_seq):
                    if i == 0:
                        _ph_seq[i] = ph + "_B"
                    elif i == last:
                        # BUG FIX: the original tested `i == len(_ph_seq)`,
                        # which enumerate can never produce, so the final
                        # phone was tagged "_I" instead of "_E".
                        _ph_seq[i] = ph + "_E"
                    else:
                        _ph_seq[i] = ph + "_I"
            if _has_disambig:
                _ph_seq.append(_dis_symb)
                _has_disambig = False
        # Pair word tokens (input) with phones (output); the shorter side is
        # padded with None by zip_longest and mapped to <eps> below.
        _temp = [_wd.split(" "), _ph_seq]
        for sub_idx, pair in enumerate(itertools.zip_longest(*_temp)):
            _arc_in, _arc_out = pair
            try:
                _arc_in = int(in_syms[_arc_in])
            except KeyError:
                if _arc_in is None:
                    _arc_in = int(in_syms["<eps>"])
                else:
                    raise ValueError(_arc_in, "this symbol not in input symbol table")
            try:
                _arc_out = int(out_syms[_arc_out])
            except KeyError:
                if _arc_out is None:
                    _arc_out = int(out_syms["<eps>"])
                else:
                    raise ValueError(_arc_out, "this symbol not in output symbol table")
            _lfst.add_state()
            if sub_idx == 0:
                # each word starts from state 1
                _lfst.add_arc(1, pynini.Arc(_arc_in, _arc_out, _arc_weight, cur_state + 1))
            else:
                _lfst.add_arc(cur_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, cur_state + 1))
            cur_state += 1
        # return to state 1
        _arc_in = int(in_syms["<eps>"])
        _arc_out = int(out_syms["<eps>"])
        _lfst.add_arc(cur_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, 1))
        if add_disambig and add_opt_sil:
            if _opt_sil_state == "":
                # First word: create the optional-silence state pair.
                _lfst.add_states(2)
                _arc_in = int(in_syms["<eps>"])
                _arc_out = int(out_syms["SIL"])
                _lfst.add_arc(cur_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, cur_state + 1))
                _opt_sil_state = cur_state
                cur_state += 1
                _arc_out = int(out_syms["#" + str(ndis + 1)])
                # return to state 1
                _lfst.add_arc(cur_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, 1))
                cur_state += 1
            else:
                # NOTE(review): identical arcs re-added per word; optimize()
                # removes the duplicates.
                _arc_in = int(in_syms["<eps>"])
                _arc_out = int(out_syms["SIL"])
                _lfst.add_arc(_opt_sil_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, _opt_sil_state + 1))
                _arc_out = int(out_syms["#" + str(ndis + 1)])
                # return to state 1
                _lfst.add_arc(_opt_sil_state + 1, pynini.Arc(_arc_in, _arc_out, _arc_weight, 1))
    _lfst.set_start(0)
    return _lfst.optimize()
class TagHelper(GrammarHelper):
    """
    Grammar helper that builds tag-grammar and sigma-star FSTs over the word
    symbol table. All FSTs use the tropical semiring with zero arc weight.
    """

    def __init__(self, words, phones, jieba_lex=None):
        super().__init__(words, phones, jieba_lex)
        # Pre-built word acceptors reused by the grammar builders below.
        self.sigma_star_fst_1state = self.gen_sigma_star_1state()
        self.sigma_star_fst_2state = self.gen_sigma_star_2state()
        self.sigma_star_fst_1state_filter = self.gen_sigma_star_1state_filter()
        self.sigma_star_fst_2state_filter = self.gen_sigma_star_2state_filter()

    @staticmethod
    def _single_arc_fst(arc_in, arc_out, arc_weight):
        """Two-state FST with a single arc labelled ``arc_in:arc_out``."""
        _fst = pynini.Fst()
        _fst.add_states(2)
        _fst.add_arc(0, pynini.Arc(arc_in, arc_out, arc_weight, 1))
        _fst.set_start(0)
        _fst.set_final(1)
        return _fst

    def gen_sigma_star_1state(self):
        """
        One-state FST with a self-loop per word: accepts any word sequence,
        including the empty one, emitting each word unchanged.
        """
        in_syms = self.word_tb
        out_syms = self.word_tb
        _sigma_star = pynini.Fst()
        _sigma_star.add_state()  # 0-state
        _arc_weight = pynini.Weight("tropical", 0)
        for key in self.word_tb.keys():
            _arc_in = int(in_syms[key])
            _arc_out = int(out_syms[key])
            _sigma_star.add_arc(0, pynini.Arc(_arc_in, _arc_out, _arc_weight, 0))
        _sigma_star.set_start(0)
        _sigma_star.set_final(0)
        return _sigma_star

    def gen_sigma_star_2state(self):
        """
        Two-state FST that accepts exactly one word (any word in the table),
        emitting it unchanged.
        """
        in_syms = self.word_tb
        out_syms = self.word_tb
        _sigma_star = pynini.Fst()
        _sigma_star.add_states(2)  # 0 and 1 state
        _arc_weight = pynini.Weight("tropical", 0)
        for key in self.word_tb.keys():
            _arc_in = int(in_syms[key])
            _arc_out = int(out_syms[key])
            _sigma_star.add_arc(0, pynini.Arc(_arc_in, _arc_out, _arc_weight, 1))
        _sigma_star.set_start(0)
        _sigma_star.set_final(1)
        return _sigma_star

    def gen_sigma_star_1state_filter(self):
        """
        Like :meth:`gen_sigma_star_1state`, but every word maps to ``<eps>``
        on the output side: consumes any word sequence without emitting it.
        """
        in_syms = self.word_tb
        out_syms = self.word_tb
        _sigma_star = pynini.Fst()
        _sigma_star.add_state()  # 0-state
        _arc_weight = pynini.Weight("tropical", 0)
        for key in self.word_tb.keys():
            _arc_in = int(in_syms[key])
            _arc_out = int(out_syms["<eps>"])
            _sigma_star.add_arc(0, pynini.Arc(_arc_in, _arc_out, _arc_weight, 0))
        _sigma_star.set_start(0)
        _sigma_star.set_final(0)
        return _sigma_star

    def gen_sigma_star_2state_filter(self):
        """
        Like :meth:`gen_sigma_star_2state`, but the accepted word is mapped
        to ``<eps>``: consumes exactly one word without emitting it.
        """
        in_syms = self.word_tb
        out_syms = self.word_tb
        _sigma_star = pynini.Fst()
        _sigma_star.add_states(2)  # 0 and 1 state
        _arc_weight = pynini.Weight("tropical", 0)
        for key in self.word_tb.keys():
            _arc_in = int(in_syms[key])
            _arc_out = int(out_syms["<eps>"])
            _sigma_star.add_arc(0, pynini.Arc(_arc_in, _arc_out, _arc_weight, 1))
        _sigma_star.set_start(0)
        _sigma_star.set_final(1)
        return _sigma_star

    def read_tag_grammar(self, x, write_words=None):
        """
        Read a grammar file and return the union of its per-line grammar FSTs.

        :param x: path to the grammar file.
        :param write_words: optional path; when given, the (possibly extended)
            word table is written back so newly created ``<tag>`` symbols persist.
        :raises TypeError: if ``write_words`` is not a string path.
        :raises ValueError: if a plain token is missing from the word table.
        """
        x = self.data_io.read_file_to_list(x)
        in_syms = self.word_tb
        out_syms = self.word_tb
        _arc_weight = pynini.Weight("tropical", 0)
        _grammar_fst = pynini.Fst()
        for idx, grammar in enumerate(x):
            _gfst = pynini.Fst()
            _gfst.add_state()
            _gfst.set_start(0)
            _gfst.set_final(0)
            for i, j in enumerate(grammar):
                if j == "<SIGMA_STAR>":
                    if i != 0:
                        _gfst = _gfst + self.sigma_star_fst_2state + self.sigma_star_fst_1state
                    else:
                        _gfst = _gfst + self.sigma_star_fst_1state
                elif re.search("<.*>", j) and j != "<SIGMA_STAR>" and j != "<s>" and j != "</s>":
                    # <tag> marker: input eps, output the tag symbol; unseen
                    # tags are added to the table on first sight.
                    _arc_in = int(in_syms["<eps>"])
                    try:
                        _arc_out = int(out_syms[j])
                    except KeyError:
                        # NOTE(review): assigning len(table) as the new id
                        # assumes existing ids are contiguous 0..N-1 — confirm.
                        out_syms.update({j: str(len(out_syms.keys()))})
                        _arc_out = int(out_syms[j])
                    _gfst = _gfst + self._single_arc_fst(_arc_in, _arc_out, _arc_weight)
                else:
                    try:
                        _arc_in = int(in_syms[j])
                        _arc_out = int(out_syms[j])
                    except KeyError:
                        raise ValueError(j, "is not in symbol tables")
                    _gfst = _gfst + self._single_arc_fst(_arc_in, _arc_out, _arc_weight)
            # go back to state-0 so the grammar can repeat
            _arc_in = int(in_syms["<eps>"])
            _arc_out = int(out_syms["<eps>"])
            cur_state = _gfst.num_states() - 1
            _gfst.add_arc(cur_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, 0))
            _gfst.set_final(0)
            _grammar_fst.union(_gfst)
        if write_words is not None:
            if not isinstance(write_words, str):
                # BUG FIX: the original used `raise(TypeError, msg)`, which in
                # Python 3 raises "exceptions must derive from BaseException".
                raise TypeError("write_words is file path name, need a string")
            self.data_io.write_word_tb(out_syms, write_words)
        return _grammar_fst.optimize()

    def read_sub_graph_grammar(self, x):
        """
        Read a grammar file and return a sub-graph grammar FST for use in FST
        intersection. Unlike :meth:`read_tag_grammar`, plain tokens and
        ``<tag>`` markers emit only ``<eps>`` (tags are hidden in the sub-graph).
        """
        x = self.data_io.read_file_to_list(x)
        in_syms = self.word_tb
        out_syms = self.word_tb
        _arc_weight = pynini.Weight("tropical", 0)
        _grammar_fst = pynini.Fst()
        for idx, grammar in enumerate(x):
            _gfst = pynini.Fst()
            _gfst.add_state()
            _gfst.set_start(0)
            _gfst.set_final(0)
            for i, j in enumerate(grammar):
                if j == "<SIGMA_STAR>":
                    if i != 0:
                        _gfst = _gfst + self.sigma_star_fst_2state + self.sigma_star_fst_1state
                    else:
                        _gfst = _gfst + self.sigma_star_fst_1state_filter
                elif re.search("<.*>", j) and j != "<SIGMA_STAR>" and j != "<s>" and j != "</s>":
                    # <tag> markers are not shown in the sub-graph: eps in/out.
                    # (To surface tags instead, resolve j via out_syms as in
                    # read_tag_grammar.)
                    _arc_in = int(in_syms["<eps>"])
                    _arc_out = int(out_syms["<eps>"])
                    _gfst = _gfst + self._single_arc_fst(_arc_in, _arc_out, _arc_weight)
                else:
                    try:
                        _arc_in = int(in_syms[j])
                        _arc_out = int(out_syms["<eps>"])
                    except KeyError:
                        raise ValueError(j, "is not in symbol tables")
                    _gfst = _gfst + self._single_arc_fst(_arc_in, _arc_out, _arc_weight)
            # go back to state-0
            _arc_in = int(in_syms["<eps>"])
            _arc_out = int(out_syms["<eps>"])
            cur_state = _gfst.num_states() - 1
            _gfst.add_arc(cur_state, pynini.Arc(_arc_in, _arc_out, _arc_weight, 0))
            _gfst.set_final(0)
            _grammar_fst.union(_gfst)
        return _grammar_fst.optimize()

    def generate_replace_fst(self, fst_in, nonterminal_in, nonterminal_out, syms_tb):
        """
        Build the replacement pattern
        ``sigma* . in-marker . filters . linearized(fst_in) . out-marker . sigma*``.

        :param fst_in: FST whose linear symbol sequence is embedded.
        :param nonterminal_in: opening marker symbol (key of ``syms_tb``).
        :param nonterminal_out: closing marker symbol (key of ``syms_tb``).
        :param syms_tb: symbol table used to resolve the markers.
        """
        _arc_weight = pynini.Weight("tropical", 0)
        fst_in_seq = fst_to_linear_sequence(fst_in)
        ne_fst = pynini.Fst()
        ne_fst.add_state()
        ne_fst.set_start(0)
        ne_fst.set_final(0)
        for sym in fst_in_seq.split():
            ne_fst = ne_fst + self.pair2fst(["0", sym])
        ne_fst.optimize()  # in-place
        _eps = int(syms_tb["<eps>"])
        terminal_in_fst = self._single_arc_fst(int(syms_tb[nonterminal_in]), _eps, _arc_weight)
        terminal_out_fst = self._single_arc_fst(int(syms_tb[nonterminal_out]), _eps, _arc_weight)
        replace_fst = (self.sigma_star_fst_1state + terminal_in_fst
                       + self.sigma_star_fst_2state_filter + self.sigma_star_fst_1state_filter
                       + ne_fst + terminal_out_fst + self.sigma_star_fst_1state)
        return replace_fst
| 37.625
| 195
| 0.504255
| 2,711
| 21,973
| 3.666175
| 0.075618
| 0.042861
| 0.025757
| 0.034309
| 0.807325
| 0.771909
| 0.733575
| 0.713754
| 0.712043
| 0.702586
| 0
| 0.015323
| 0.397078
| 21,973
| 583
| 196
| 37.689537
| 0.734903
| 0.069039
| 0
| 0.79108
| 0
| 0
| 0.033164
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030516
| false
| 0
| 0.014085
| 0
| 0.077465
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22d86487bfc2e4e4d43e0a413aad7d4800a8fca3
| 28,730
|
py
|
Python
|
src/whop/whopclient/api/licenses_api.py
|
whopio/whop-python-sdk
|
9b4da585bf81065a9a435cf6651d9a0cd206088c
|
[
"MIT"
] | null | null | null |
src/whop/whopclient/api/licenses_api.py
|
whopio/whop-python-sdk
|
9b4da585bf81065a9a435cf6651d9a0cd206088c
|
[
"MIT"
] | null | null | null |
src/whop/whopclient/api/licenses_api.py
|
whopio/whop-python-sdk
|
9b4da585bf81065a9a435cf6651d9a0cd206088c
|
[
"MIT"
] | null | null | null |
"""
Whop API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 1.0.10
Contact: support@whop.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from whop.whopclient.api_client import ApiClient, Endpoint as _Endpoint
from whop.whopclient.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from whop.whopclient.model.ban_license_by_key_response import BanLicenseByKeyResponse
from whop.whopclient.model.error_response import ErrorResponse
from whop.whopclient.model.get_license_by_key_response import GetLicenseByKeyResponse
from whop.whopclient.model.get_licenses_response import GetLicensesResponse
from whop.whopclient.model.reset_license_by_key_response import ResetLicenseByKeyResponse
from whop.whopclient.model.update_license_by_key_request import UpdateLicenseByKeyRequest
from whop.whopclient.model.update_license_by_key_response import UpdateLicenseByKeyResponse
from whop.whopclient.model.validate_license_by_key_request import ValidateLicenseByKeyRequest
from whop.whopclient.model.validate_license_by_key_response import ValidateLicenseByKeyResponse
class LicensesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Create the API wrapper; builds one _Endpoint descriptor per operation.

    api_client: optional ApiClient; a default-configured one is created
    when omitted.
    """
    if api_client is None:
        api_client = ApiClient()
    self.api_client = api_client
    # Each _Endpoint bundles routing (settings), parameter metadata
    # (params_map/root_map) and content negotiation (headers_map); the
    # public methods below delegate to endpoint.call_with_http_info().
    # POST /v1/licenses/{key}/ban
    self.ban_license_by_key_endpoint = _Endpoint(
        settings={
            'response_type': (BanLicenseByKeyResponse,),
            'auth': ['Bearer'],
            'endpoint_path': '/v1/licenses/{key}/ban',
            'operation_id': 'ban_license_by_key',
            'http_method': 'POST',
            'servers': None,
        },
        params_map={
            'all': ['key',],
            'required': ['key',],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {'key': (str,),},
            'attribute_map': {'key': 'key',},
            'location_map': {'key': 'path',},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': [],
        },
        api_client=api_client
    )
    # GET /v1/licenses/{key}
    self.get_license_by_key_endpoint = _Endpoint(
        settings={
            'response_type': (GetLicenseByKeyResponse,),
            'auth': ['Bearer', 'ClientID'],
            'endpoint_path': '/v1/licenses/{key}',
            'operation_id': 'get_license_by_key',
            'http_method': 'GET',
            'servers': None,
        },
        params_map={
            'all': ['key',],
            'required': ['key',],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {'key': (str,),},
            'attribute_map': {'key': 'key',},
            'location_map': {'key': 'path',},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': [],
        },
        api_client=api_client
    )
    # GET /v1/licenses (paged listing; all parameters are optional query args)
    self.get_licenses_endpoint = _Endpoint(
        settings={
            'response_type': (GetLicensesResponse,),
            'auth': ['Bearer'],
            'endpoint_path': '/v1/licenses',
            'operation_id': 'get_licenses',
            'http_method': 'GET',
            'servers': None,
        },
        params_map={
            'all': ['discord_account_id', 'page', 'start', 'end',],
            'required': [],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'discord_account_id': (str,),
                'page': (int,),
                'start': (str,),
                'end': (str,),
            },
            'attribute_map': {
                'discord_account_id': 'discord_account_id',
                'page': 'page',
                'start': 'start',
                'end': 'end',
            },
            'location_map': {
                'discord_account_id': 'query',
                'page': 'query',
                'start': 'query',
                'end': 'query',
            },
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': [],
        },
        api_client=api_client
    )
    # POST /v1/licenses/{key}/reset
    self.reset_license_by_key_endpoint = _Endpoint(
        settings={
            'response_type': (ResetLicenseByKeyResponse,),
            'auth': ['Bearer', 'ClientID'],
            'endpoint_path': '/v1/licenses/{key}/reset',
            'operation_id': 'reset_license_by_key',
            'http_method': 'POST',
            'servers': None,
        },
        params_map={
            'all': ['key',],
            'required': ['key',],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {'key': (str,),},
            'attribute_map': {'key': 'key',},
            'location_map': {'key': 'path',},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': [],
        },
        api_client=api_client
    )
    # PATCH /v1/licenses/{key} (optional JSON body)
    self.update_license_by_key_endpoint = _Endpoint(
        settings={
            'response_type': (UpdateLicenseByKeyResponse,),
            'auth': ['Bearer', 'ClientID'],
            'endpoint_path': '/v1/licenses/{key}',
            'operation_id': 'update_license_by_key',
            'http_method': 'PATCH',
            'servers': None,
        },
        params_map={
            'all': ['key', 'update_license_by_key_request',],
            'required': ['key',],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'key': (str,),
                'update_license_by_key_request': (UpdateLicenseByKeyRequest,),
            },
            'attribute_map': {'key': 'key',},
            'location_map': {
                'key': 'path',
                'update_license_by_key_request': 'body',
            },
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': ['application/json']
        },
        api_client=api_client
    )
    # POST /v1/licenses/{key}/validate (optional JSON body)
    self.validate_license_by_key_endpoint = _Endpoint(
        settings={
            'response_type': (ValidateLicenseByKeyResponse,),
            'auth': ['Bearer', 'ClientID'],
            'endpoint_path': '/v1/licenses/{key}/validate',
            'operation_id': 'validate_license_by_key',
            'http_method': 'POST',
            'servers': None,
        },
        params_map={
            'all': ['key', 'validate_license_by_key_request',],
            'required': ['key',],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'key': (str,),
                'validate_license_by_key_request': (ValidateLicenseByKeyRequest,),
            },
            'attribute_map': {'key': 'key',},
            'location_map': {
                'key': 'path',
                'validate_license_by_key_request': 'body',
            },
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': ['application/json']
        },
        api_client=api_client
    )
def ban_license_by_key(
self,
key,
**kwargs
):
"""Ban License # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.ban_license_by_key(key, async_req=True)
>>> result = thread.get()
Args:
key (str): Key of the license you wish to ban.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
BanLicenseByKeyResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['key'] = \
key
return self.ban_license_by_key_endpoint.call_with_http_info(**kwargs)
def get_license_by_key(
self,
key,
**kwargs
):
"""Fetch License # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_license_by_key(key, async_req=True)
>>> result = thread.get()
Args:
key (str): Key of the license you wish to fetch.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
GetLicenseByKeyResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['key'] = \
key
return self.get_license_by_key_endpoint.call_with_http_info(**kwargs)
def get_licenses(
self,
**kwargs
):
"""Fetch All Licenses # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_licenses(async_req=True)
>>> result = thread.get()
Keyword Args:
discord_account_id (str): ID of the Discord account for which you want to fetch licenses.. [optional]
page (int): Page number of license data to fetch.. [optional]
start (str): Start date of license creation. Date should be in the format YYYY-MM-DD.. [optional]
end (str): End date of license creation. Date should be in the format YYYY-MM-DD.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
GetLicensesResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
return self.get_licenses_endpoint.call_with_http_info(**kwargs)
def reset_license_by_key(
self,
key,
**kwargs
):
"""Reset License # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.reset_license_by_key(key, async_req=True)
>>> result = thread.get()
Args:
key (str): Key of the license you wish to reset.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ResetLicenseByKeyResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['key'] = \
key
return self.reset_license_by_key_endpoint.call_with_http_info(**kwargs)
def update_license_by_key(
self,
key,
**kwargs
):
"""Update License # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_license_by_key(key, async_req=True)
>>> result = thread.get()
Args:
key (str): Key of the license you wish to update.
Keyword Args:
update_license_by_key_request (UpdateLicenseByKeyRequest): Details of license key metadata.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
UpdateLicenseByKeyResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['key'] = \
key
return self.update_license_by_key_endpoint.call_with_http_info(**kwargs)
def validate_license_by_key(
self,
key,
**kwargs
):
"""Validate Key # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_license_by_key(key, async_req=True)
>>> result = thread.get()
Args:
key (str): Key of the license you wish to validate.
Keyword Args:
validate_license_by_key_request (ValidateLicenseByKeyRequest): Details of license key metadata.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ValidateLicenseByKeyResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['key'] = \
key
return self.validate_license_by_key_endpoint.call_with_http_info(**kwargs)
| 35.733831
| 124
| 0.49819
| 2,663
| 28,730
| 5.126549
| 0.078858
| 0.031644
| 0.03516
| 0.023733
| 0.862877
| 0.840683
| 0.803692
| 0.801275
| 0.756226
| 0.75315
| 0
| 0.002747
| 0.417125
| 28,730
| 803
| 125
| 35.778331
| 0.812493
| 0.361295
| 0
| 0.629699
| 1
| 0
| 0.214987
| 0.041383
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013158
| false
| 0
| 0.024436
| 0
| 0.050752
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3cb0730192fe84de069a2d185bb5e5ff4ed06ae
| 52,832
|
py
|
Python
|
sdk/python/pulumi_vault/approle/auth_backend_role.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 10
|
2019-10-07T17:44:18.000Z
|
2022-03-30T20:46:33.000Z
|
sdk/python/pulumi_vault/approle/auth_backend_role.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 79
|
2019-10-11T18:13:07.000Z
|
2022-03-31T21:09:41.000Z
|
sdk/python/pulumi_vault/approle/auth_backend_role.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2019-10-28T10:08:40.000Z
|
2020-03-17T14:20:55.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['AuthBackendRoleArgs', 'AuthBackendRole']
@pulumi.input_type
class AuthBackendRoleArgs:
    def __init__(__self__, *,
                 role_name: pulumi.Input[str],
                 backend: Optional[pulumi.Input[str]] = None,
                 bind_secret_id: Optional[pulumi.Input[bool]] = None,
                 role_id: Optional[pulumi.Input[str]] = None,
                 secret_id_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 secret_id_num_uses: Optional[pulumi.Input[int]] = None,
                 secret_id_ttl: Optional[pulumi.Input[int]] = None,
                 token_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_explicit_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_no_default_policy: Optional[pulumi.Input[bool]] = None,
                 token_num_uses: Optional[pulumi.Input[int]] = None,
                 token_period: Optional[pulumi.Input[int]] = None,
                 token_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_ttl: Optional[pulumi.Input[int]] = None,
                 token_policies_note_see_docs: None = None,
                 token_type: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a AuthBackendRole resource.
        :param pulumi.Input[str] role_name: The name of the role.
        :param pulumi.Input[str] backend: The unique name of the auth backend to configure.
               Defaults to `approle`.
        :param pulumi.Input[bool] bind_secret_id: Whether or not to require `secret_id` to be
               presented when logging in using this AppRole. Defaults to `true`.
        :param pulumi.Input[str] role_id: The RoleID of this role. If not specified, one will be
               auto-generated.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] secret_id_bound_cidrs: If set,
               specifies blocks of IP addresses which can perform the login operation.
        :param pulumi.Input[int] secret_id_num_uses: The number of times any particular SecretID
               can be used to fetch a token from this AppRole, after which the SecretID will
               expire. A value of zero will allow unlimited uses.
        :param pulumi.Input[int] secret_id_ttl: The number of seconds after which any SecretID
               expires.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_bound_cidrs: List of CIDR blocks; if set, specifies blocks of IP
               addresses which can authenticate successfully, and ties the resulting token to these blocks
               as well.
        :param pulumi.Input[int] token_explicit_max_ttl: If set, will encode an
               [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
               onto the token in number of seconds. This is a hard cap even if `token_ttl` and
               `token_max_ttl` would otherwise allow a renewal.
        :param pulumi.Input[int] token_max_ttl: The maximum lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[bool] token_no_default_policy: If set, the default policy will not be set on
               generated tokens; otherwise it will be added to the policies set in token_policies.
        :param pulumi.Input[int] token_num_uses: The maximum number of times a generated token may
               be used (within its lifetime); a value of zero means unlimited uses.
        :param pulumi.Input[int] token_period: If set, indicates that the
               token generated using this role should never expire. The token should be renewed within the
               duration specified by this value. At each renewal, the token's TTL will be set to the
               value of this field. Specified in seconds.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_policies: List of policies to encode onto generated tokens. Depending
               on the auth method, this list may be supplemented by user/group/other values.
        :param pulumi.Input[int] token_ttl: The incremental lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[str] token_type: The type of token that should be generated. Can be `service`,
               `batch`, or `default` to use the mount's tuned default (which unless changed will be
               `service` tokens). For token store roles, there are two additional possibilities:
               `default-service` and `default-batch` which specify the type to return unless the client
               requests a different type at generation time.
        """
        pulumi.set(__self__, "role_name", role_name)
        if backend is not None:
            pulumi.set(__self__, "backend", backend)
        if bind_secret_id is not None:
            pulumi.set(__self__, "bind_secret_id", bind_secret_id)
        if role_id is not None:
            pulumi.set(__self__, "role_id", role_id)
        if secret_id_bound_cidrs is not None:
            pulumi.set(__self__, "secret_id_bound_cidrs", secret_id_bound_cidrs)
        if secret_id_num_uses is not None:
            pulumi.set(__self__, "secret_id_num_uses", secret_id_num_uses)
        if secret_id_ttl is not None:
            pulumi.set(__self__, "secret_id_ttl", secret_id_ttl)
        if token_bound_cidrs is not None:
            pulumi.set(__self__, "token_bound_cidrs", token_bound_cidrs)
        if token_explicit_max_ttl is not None:
            pulumi.set(__self__, "token_explicit_max_ttl", token_explicit_max_ttl)
        if token_max_ttl is not None:
            pulumi.set(__self__, "token_max_ttl", token_max_ttl)
        if token_no_default_policy is not None:
            pulumi.set(__self__, "token_no_default_policy", token_no_default_policy)
        if token_num_uses is not None:
            pulumi.set(__self__, "token_num_uses", token_num_uses)
        if token_period is not None:
            pulumi.set(__self__, "token_period", token_period)
        if token_policies is not None:
            pulumi.set(__self__, "token_policies", token_policies)
        if token_ttl is not None:
            pulumi.set(__self__, "token_ttl", token_ttl)
        if token_type is not None:
            pulumi.set(__self__, "token_type", token_type)
    @property
    @pulumi.getter(name="roleName")
    def role_name(self) -> pulumi.Input[str]:
        """
        The name of the role.
        """
        return pulumi.get(self, "role_name")
    @role_name.setter
    def role_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "role_name", value)
    @property
    @pulumi.getter
    def backend(self) -> Optional[pulumi.Input[str]]:
        """
        The unique name of the auth backend to configure.
        Defaults to `approle`.
        """
        return pulumi.get(self, "backend")
    @backend.setter
    def backend(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend", value)
    @property
    @pulumi.getter(name="bindSecretId")
    def bind_secret_id(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether or not to require `secret_id` to be
        presented when logging in using this AppRole. Defaults to `true`.
        """
        return pulumi.get(self, "bind_secret_id")
    @bind_secret_id.setter
    def bind_secret_id(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "bind_secret_id", value)
    @property
    @pulumi.getter(name="roleId")
    def role_id(self) -> Optional[pulumi.Input[str]]:
        """
        The RoleID of this role. If not specified, one will be
        auto-generated.
        """
        return pulumi.get(self, "role_id")
    @role_id.setter
    def role_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role_id", value)
    @property
    @pulumi.getter(name="secretIdBoundCidrs")
    def secret_id_bound_cidrs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        If set,
        specifies blocks of IP addresses which can perform the login operation.
        """
        return pulumi.get(self, "secret_id_bound_cidrs")
    @secret_id_bound_cidrs.setter
    def secret_id_bound_cidrs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "secret_id_bound_cidrs", value)
    @property
    @pulumi.getter(name="secretIdNumUses")
    def secret_id_num_uses(self) -> Optional[pulumi.Input[int]]:
        """
        The number of times any particular SecretID
        can be used to fetch a token from this AppRole, after which the SecretID will
        expire. A value of zero will allow unlimited uses.
        """
        return pulumi.get(self, "secret_id_num_uses")
    @secret_id_num_uses.setter
    def secret_id_num_uses(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "secret_id_num_uses", value)
    @property
    @pulumi.getter(name="secretIdTtl")
    def secret_id_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The number of seconds after which any SecretID
        expires.
        """
        return pulumi.get(self, "secret_id_ttl")
    @secret_id_ttl.setter
    def secret_id_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "secret_id_ttl", value)
    @property
    @pulumi.getter(name="tokenBoundCidrs")
    def token_bound_cidrs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of CIDR blocks; if set, specifies blocks of IP
        addresses which can authenticate successfully, and ties the resulting token to these blocks
        as well.
        """
        return pulumi.get(self, "token_bound_cidrs")
    @token_bound_cidrs.setter
    def token_bound_cidrs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "token_bound_cidrs", value)
    @property
    @pulumi.getter(name="tokenExplicitMaxTtl")
    def token_explicit_max_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        If set, will encode an
        [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
        onto the token in number of seconds. This is a hard cap even if `token_ttl` and
        `token_max_ttl` would otherwise allow a renewal.
        """
        return pulumi.get(self, "token_explicit_max_ttl")
    @token_explicit_max_ttl.setter
    def token_explicit_max_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_explicit_max_ttl", value)
    @property
    @pulumi.getter(name="tokenMaxTtl")
    def token_max_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum lifetime for generated tokens in number of seconds.
        Its current value will be referenced at renewal time.
        """
        return pulumi.get(self, "token_max_ttl")
    @token_max_ttl.setter
    def token_max_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_max_ttl", value)
    @property
    @pulumi.getter(name="tokenNoDefaultPolicy")
    def token_no_default_policy(self) -> Optional[pulumi.Input[bool]]:
        """
        If set, the default policy will not be set on
        generated tokens; otherwise it will be added to the policies set in token_policies.
        """
        return pulumi.get(self, "token_no_default_policy")
    @token_no_default_policy.setter
    def token_no_default_policy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "token_no_default_policy", value)
    @property
    @pulumi.getter(name="tokenNumUses")
    def token_num_uses(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum number of times a generated token may
        be used (within its lifetime); a value of zero means unlimited uses.
        """
        return pulumi.get(self, "token_num_uses")
    @token_num_uses.setter
    def token_num_uses(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_num_uses", value)
    @property
    @pulumi.getter(name="tokenPeriod")
    def token_period(self) -> Optional[pulumi.Input[int]]:
        """
        If set, indicates that the
        token generated using this role should never expire. The token should be renewed within the
        duration specified by this value. At each renewal, the token's TTL will be set to the
        value of this field. Specified in seconds.
        """
        return pulumi.get(self, "token_period")
    @token_period.setter
    def token_period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_period", value)
    @property
    @pulumi.getter(name="tokenPolicies")
    def token_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of policies to encode onto generated tokens. Depending
        on the auth method, this list may be supplemented by user/group/other values.
        """
        return pulumi.get(self, "token_policies")
    @token_policies.setter
    def token_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "token_policies", value)
    @property
    @pulumi.getter(name="tokenTtl")
    def token_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The incremental lifetime for generated tokens in number of seconds.
        Its current value will be referenced at renewal time.
        """
        return pulumi.get(self, "token_ttl")
    @token_ttl.setter
    def token_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_ttl", value)
    @property
    @pulumi.getter(name="tokenType")
    def token_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of token that should be generated. Can be `service`,
        `batch`, or `default` to use the mount's tuned default (which unless changed will be
        `service` tokens). For token store roles, there are two additional possibilities:
        `default-service` and `default-batch` which specify the type to return unless the client
        requests a different type at generation time.
        """
        return pulumi.get(self, "token_type")
    @token_type.setter
    def token_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "token_type", value)
@pulumi.input_type
class _AuthBackendRoleState:
    def __init__(__self__, *,
                 backend: Optional[pulumi.Input[str]] = None,
                 bind_secret_id: Optional[pulumi.Input[bool]] = None,
                 role_id: Optional[pulumi.Input[str]] = None,
                 role_name: Optional[pulumi.Input[str]] = None,
                 secret_id_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 secret_id_num_uses: Optional[pulumi.Input[int]] = None,
                 secret_id_ttl: Optional[pulumi.Input[int]] = None,
                 token_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_explicit_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_no_default_policy: Optional[pulumi.Input[bool]] = None,
                 token_num_uses: Optional[pulumi.Input[int]] = None,
                 token_period: Optional[pulumi.Input[int]] = None,
                 token_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_ttl: Optional[pulumi.Input[int]] = None,
                 token_type: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering AuthBackendRole resources.
        :param pulumi.Input[str] backend: The unique name of the auth backend to configure.
               Defaults to `approle`.
        :param pulumi.Input[bool] bind_secret_id: Whether or not to require `secret_id` to be
               presented when logging in using this AppRole. Defaults to `true`.
        :param pulumi.Input[str] role_id: The RoleID of this role. If not specified, one will be
               auto-generated.
        :param pulumi.Input[str] role_name: The name of the role.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] secret_id_bound_cidrs: If set,
               specifies blocks of IP addresses which can perform the login operation.
        :param pulumi.Input[int] secret_id_num_uses: The number of times any particular SecretID
               can be used to fetch a token from this AppRole, after which the SecretID will
               expire. A value of zero will allow unlimited uses.
        :param pulumi.Input[int] secret_id_ttl: The number of seconds after which any SecretID
               expires.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_bound_cidrs: List of CIDR blocks; if set, specifies blocks of IP
               addresses which can authenticate successfully, and ties the resulting token to these blocks
               as well.
        :param pulumi.Input[int] token_explicit_max_ttl: If set, will encode an
               [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
               onto the token in number of seconds. This is a hard cap even if `token_ttl` and
               `token_max_ttl` would otherwise allow a renewal.
        :param pulumi.Input[int] token_max_ttl: The maximum lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[bool] token_no_default_policy: If set, the default policy will not be set on
               generated tokens; otherwise it will be added to the policies set in token_policies.
        :param pulumi.Input[int] token_num_uses: The maximum number of times a generated token may
               be used (within its lifetime); a value of zero means unlimited uses.
        :param pulumi.Input[int] token_period: If set, indicates that the
               token generated using this role should never expire. The token should be renewed within the
               duration specified by this value. At each renewal, the token's TTL will be set to the
               value of this field. Specified in seconds.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_policies: List of policies to encode onto generated tokens. Depending
               on the auth method, this list may be supplemented by user/group/other values.
        :param pulumi.Input[int] token_ttl: The incremental lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[str] token_type: The type of token that should be generated. Can be `service`,
               `batch`, or `default` to use the mount's tuned default (which unless changed will be
               `service` tokens). For token store roles, there are two additional possibilities:
               `default-service` and `default-batch` which specify the type to return unless the client
               requests a different type at generation time.
        """
        if backend is not None:
            pulumi.set(__self__, "backend", backend)
        if bind_secret_id is not None:
            pulumi.set(__self__, "bind_secret_id", bind_secret_id)
        if role_id is not None:
            pulumi.set(__self__, "role_id", role_id)
        if role_name is not None:
            pulumi.set(__self__, "role_name", role_name)
        if secret_id_bound_cidrs is not None:
            pulumi.set(__self__, "secret_id_bound_cidrs", secret_id_bound_cidrs)
        if secret_id_num_uses is not None:
            pulumi.set(__self__, "secret_id_num_uses", secret_id_num_uses)
        if secret_id_ttl is not None:
            pulumi.set(__self__, "secret_id_ttl", secret_id_ttl)
        if token_bound_cidrs is not None:
            pulumi.set(__self__, "token_bound_cidrs", token_bound_cidrs)
        if token_explicit_max_ttl is not None:
            pulumi.set(__self__, "token_explicit_max_ttl", token_explicit_max_ttl)
        if token_max_ttl is not None:
            pulumi.set(__self__, "token_max_ttl", token_max_ttl)
        if token_no_default_policy is not None:
            pulumi.set(__self__, "token_no_default_policy", token_no_default_policy)
        if token_num_uses is not None:
            pulumi.set(__self__, "token_num_uses", token_num_uses)
        if token_period is not None:
            pulumi.set(__self__, "token_period", token_period)
        if token_policies is not None:
            pulumi.set(__self__, "token_policies", token_policies)
        if token_ttl is not None:
            pulumi.set(__self__, "token_ttl", token_ttl)
        if token_type is not None:
            pulumi.set(__self__, "token_type", token_type)
    @property
    @pulumi.getter
    def backend(self) -> Optional[pulumi.Input[str]]:
        """
        The unique name of the auth backend to configure.
        Defaults to `approle`.
        """
        return pulumi.get(self, "backend")
    @backend.setter
    def backend(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend", value)
    @property
    @pulumi.getter(name="bindSecretId")
    def bind_secret_id(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether or not to require `secret_id` to be
        presented when logging in using this AppRole. Defaults to `true`.
        """
        return pulumi.get(self, "bind_secret_id")
    @bind_secret_id.setter
    def bind_secret_id(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "bind_secret_id", value)
    @property
    @pulumi.getter(name="roleId")
    def role_id(self) -> Optional[pulumi.Input[str]]:
        """
        The RoleID of this role. If not specified, one will be
        auto-generated.
        """
        return pulumi.get(self, "role_id")
    @role_id.setter
    def role_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role_id", value)
    @property
    @pulumi.getter(name="roleName")
    def role_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the role.
        """
        return pulumi.get(self, "role_name")
    @role_name.setter
    def role_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role_name", value)
    @property
    @pulumi.getter(name="secretIdBoundCidrs")
    def secret_id_bound_cidrs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        If set,
        specifies blocks of IP addresses which can perform the login operation.
        """
        return pulumi.get(self, "secret_id_bound_cidrs")
    @secret_id_bound_cidrs.setter
    def secret_id_bound_cidrs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "secret_id_bound_cidrs", value)
    @property
    @pulumi.getter(name="secretIdNumUses")
    def secret_id_num_uses(self) -> Optional[pulumi.Input[int]]:
        """
        The number of times any particular SecretID
        can be used to fetch a token from this AppRole, after which the SecretID will
        expire. A value of zero will allow unlimited uses.
        """
        return pulumi.get(self, "secret_id_num_uses")
    @secret_id_num_uses.setter
    def secret_id_num_uses(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "secret_id_num_uses", value)
    @property
    @pulumi.getter(name="secretIdTtl")
    def secret_id_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The number of seconds after which any SecretID
        expires.
        """
        return pulumi.get(self, "secret_id_ttl")
    @secret_id_ttl.setter
    def secret_id_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "secret_id_ttl", value)
    @property
    @pulumi.getter(name="tokenBoundCidrs")
    def token_bound_cidrs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of CIDR blocks; if set, specifies blocks of IP
        addresses which can authenticate successfully, and ties the resulting token to these blocks
        as well.
        """
        return pulumi.get(self, "token_bound_cidrs")
    @token_bound_cidrs.setter
    def token_bound_cidrs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "token_bound_cidrs", value)
    @property
    @pulumi.getter(name="tokenExplicitMaxTtl")
    def token_explicit_max_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        If set, will encode an
        [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
        onto the token in number of seconds. This is a hard cap even if `token_ttl` and
        `token_max_ttl` would otherwise allow a renewal.
        """
        return pulumi.get(self, "token_explicit_max_ttl")
    @token_explicit_max_ttl.setter
    def token_explicit_max_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_explicit_max_ttl", value)
    @property
    @pulumi.getter(name="tokenMaxTtl")
    def token_max_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum lifetime for generated tokens in number of seconds.
        Its current value will be referenced at renewal time.
        """
        return pulumi.get(self, "token_max_ttl")
    @token_max_ttl.setter
    def token_max_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_max_ttl", value)
    @property
    @pulumi.getter(name="tokenNoDefaultPolicy")
    def token_no_default_policy(self) -> Optional[pulumi.Input[bool]]:
        """
        If set, the default policy will not be set on
        generated tokens; otherwise it will be added to the policies set in token_policies.
        """
        return pulumi.get(self, "token_no_default_policy")
    @token_no_default_policy.setter
    def token_no_default_policy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "token_no_default_policy", value)
    @property
    @pulumi.getter(name="tokenNumUses")
    def token_num_uses(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum number of times a generated token may
        be used (within its lifetime); a value of zero means unlimited uses.
        """
        return pulumi.get(self, "token_num_uses")
    @token_num_uses.setter
    def token_num_uses(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_num_uses", value)
    @property
    @pulumi.getter(name="tokenPeriod")
    def token_period(self) -> Optional[pulumi.Input[int]]:
        """
        If set, indicates that the
        token generated using this role should never expire. The token should be renewed within the
        duration specified by this value. At each renewal, the token's TTL will be set to the
        value of this field. Specified in seconds.
        """
        return pulumi.get(self, "token_period")
    @token_period.setter
    def token_period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_period", value)
    @property
    @pulumi.getter(name="tokenPolicies")
    def token_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of policies to encode onto generated tokens. Depending
        on the auth method, this list may be supplemented by user/group/other values.
        """
        return pulumi.get(self, "token_policies")
    @token_policies.setter
    def token_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "token_policies", value)
    @property
    @pulumi.getter(name="tokenTtl")
    def token_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The incremental lifetime for generated tokens in number of seconds.
        Its current value will be referenced at renewal time.
        """
        return pulumi.get(self, "token_ttl")
    @token_ttl.setter
    def token_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_ttl", value)
    @property
    @pulumi.getter(name="tokenType")
    def token_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of token that should be generated. Can be `service`,
        `batch`, or `default` to use the mount's tuned default (which unless changed will be
        `service` tokens). For token store roles, there are two additional possibilities:
        `default-service` and `default-batch` which specify the type to return unless the client
        requests a different type at generation time.
        """
        return pulumi.get(self, "token_type")
    @token_type.setter
    def token_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "token_type", value)
class AuthBackendRole(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 bind_secret_id: Optional[pulumi.Input[bool]] = None,
                 role_id: Optional[pulumi.Input[str]] = None,
                 role_name: Optional[pulumi.Input[str]] = None,
                 secret_id_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 secret_id_num_uses: Optional[pulumi.Input[int]] = None,
                 secret_id_ttl: Optional[pulumi.Input[int]] = None,
                 token_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_explicit_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_no_default_policy: Optional[pulumi.Input[bool]] = None,
                 token_num_uses: Optional[pulumi.Input[int]] = None,
                 token_period: Optional[pulumi.Input[int]] = None,
                 token_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_ttl: Optional[pulumi.Input[int]] = None,
                 token_type: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages an AppRole auth backend role in a Vault server. See the [Vault
        documentation](https://www.vaultproject.io/docs/auth/approle) for more
        information.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_vault as vault
        approle = vault.AuthBackend("approle", type="approle")
        example = vault.app_role.AuthBackendRole("example",
            backend=approle.path,
            role_name="test-role",
            token_policies=[
                "default",
                "dev",
                "prod",
            ])
        ```
        ## Import
        AppRole authentication backend roles can be imported using the `path`, e.g.
        ```sh
        $ pulumi import vault:appRole/authBackendRole:AuthBackendRole example auth/approle/role/test-role
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] backend: The unique name of the auth backend to configure.
               Defaults to `approle`.
        :param pulumi.Input[bool] bind_secret_id: Whether or not to require `secret_id` to be
               presented when logging in using this AppRole. Defaults to `true`.
        :param pulumi.Input[str] role_id: The RoleID of this role. If not specified, one will be
               auto-generated.
        :param pulumi.Input[str] role_name: The name of the role.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] secret_id_bound_cidrs: If set,
               specifies blocks of IP addresses which can perform the login operation.
        :param pulumi.Input[int] secret_id_num_uses: The number of times any particular SecretID
               can be used to fetch a token from this AppRole, after which the SecretID will
               expire. A value of zero will allow unlimited uses.
        :param pulumi.Input[int] secret_id_ttl: The number of seconds after which any SecretID
               expires.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_bound_cidrs: List of CIDR blocks; if set, specifies blocks of IP
               addresses which can authenticate successfully, and ties the resulting token to these blocks
               as well.
        :param pulumi.Input[int] token_explicit_max_ttl: If set, will encode an
               [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
               onto the token in number of seconds. This is a hard cap even if `token_ttl` and
               `token_max_ttl` would otherwise allow a renewal.
        :param pulumi.Input[int] token_max_ttl: The maximum lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[bool] token_no_default_policy: If set, the default policy will not be set on
               generated tokens; otherwise it will be added to the policies set in token_policies.
        :param pulumi.Input[int] token_num_uses: The maximum number of times a generated token may
               be used (within its lifetime); a value of zero means unlimited uses.
        :param pulumi.Input[int] token_period: If set, indicates that the
               token generated using this role should never expire. The token should be renewed within the
               duration specified by this value. At each renewal, the token's TTL will be set to the
               value of this field. Specified in seconds.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_policies: List of policies to encode onto generated tokens. Depending
               on the auth method, this list may be supplemented by user/group/other values.
        :param pulumi.Input[int] token_ttl: The incremental lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[str] token_type: The type of token that should be generated. Can be `service`,
               `batch`, or `default` to use the mount's tuned default (which unless changed will be
               `service` tokens). For token store roles, there are two additional possibilities:
               `default-service` and `default-batch` which specify the type to return unless the client
               requests a different type at generation time.
        """
        # Typing-only overload; the runtime implementation is _internal_init.
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: AuthBackendRoleArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages an AppRole auth backend role in a Vault server. See the [Vault
        documentation](https://www.vaultproject.io/docs/auth/approle) for more
        information.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_vault as vault
        approle = vault.AuthBackend("approle", type="approle")
        example = vault.app_role.AuthBackendRole("example",
            backend=approle.path,
            role_name="test-role",
            token_policies=[
                "default",
                "dev",
                "prod",
            ])
        ```
        ## Import
        AppRole authentication backend roles can be imported using the `path`, e.g.
        ```sh
        $ pulumi import vault:appRole/authBackendRole:AuthBackendRole example auth/approle/role/test-role
        ```
        :param str resource_name: The name of the resource.
        :param AuthBackendRoleArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        # Typing-only overload; the runtime implementation is _internal_init.
        ...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AuthBackendRoleArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
backend: Optional[pulumi.Input[str]] = None,
bind_secret_id: Optional[pulumi.Input[bool]] = None,
role_id: Optional[pulumi.Input[str]] = None,
role_name: Optional[pulumi.Input[str]] = None,
secret_id_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
secret_id_num_uses: Optional[pulumi.Input[int]] = None,
secret_id_ttl: Optional[pulumi.Input[int]] = None,
token_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
token_explicit_max_ttl: Optional[pulumi.Input[int]] = None,
token_max_ttl: Optional[pulumi.Input[int]] = None,
token_no_default_policy: Optional[pulumi.Input[bool]] = None,
token_num_uses: Optional[pulumi.Input[int]] = None,
token_period: Optional[pulumi.Input[int]] = None,
token_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
token_ttl: Optional[pulumi.Input[int]] = None,
token_type: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AuthBackendRoleArgs.__new__(AuthBackendRoleArgs)
__props__.__dict__["backend"] = backend
__props__.__dict__["bind_secret_id"] = bind_secret_id
__props__.__dict__["role_id"] = role_id
if role_name is None and not opts.urn:
raise TypeError("Missing required property 'role_name'")
__props__.__dict__["role_name"] = role_name
__props__.__dict__["secret_id_bound_cidrs"] = secret_id_bound_cidrs
__props__.__dict__["secret_id_num_uses"] = secret_id_num_uses
__props__.__dict__["secret_id_ttl"] = secret_id_ttl
__props__.__dict__["token_bound_cidrs"] = token_bound_cidrs
__props__.__dict__["token_explicit_max_ttl"] = token_explicit_max_ttl
__props__.__dict__["token_max_ttl"] = token_max_ttl
__props__.__dict__["token_no_default_policy"] = token_no_default_policy
__props__.__dict__["token_num_uses"] = token_num_uses
__props__.__dict__["token_period"] = token_period
__props__.__dict__["token_policies"] = token_policies
__props__.__dict__["token_ttl"] = token_ttl
__props__.__dict__["token_type"] = token_type
super(AuthBackendRole, __self__).__init__(
'vault:appRole/authBackendRole:AuthBackendRole',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
backend: Optional[pulumi.Input[str]] = None,
bind_secret_id: Optional[pulumi.Input[bool]] = None,
role_id: Optional[pulumi.Input[str]] = None,
role_name: Optional[pulumi.Input[str]] = None,
secret_id_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
secret_id_num_uses: Optional[pulumi.Input[int]] = None,
secret_id_ttl: Optional[pulumi.Input[int]] = None,
token_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
token_explicit_max_ttl: Optional[pulumi.Input[int]] = None,
token_max_ttl: Optional[pulumi.Input[int]] = None,
token_no_default_policy: Optional[pulumi.Input[bool]] = None,
token_num_uses: Optional[pulumi.Input[int]] = None,
token_period: Optional[pulumi.Input[int]] = None,
token_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
token_ttl: Optional[pulumi.Input[int]] = None,
token_type: Optional[pulumi.Input[str]] = None) -> 'AuthBackendRole':
"""
Get an existing AuthBackendRole resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] backend: The unique name of the auth backend to configure.
Defaults to `approle`.
:param pulumi.Input[bool] bind_secret_id: Whether or not to require `secret_id` to be
presented when logging in using this AppRole. Defaults to `true`.
:param pulumi.Input[str] role_id: The RoleID of this role. If not specified, one will be
auto-generated.
:param pulumi.Input[str] role_name: The name of the role.
:param pulumi.Input[Sequence[pulumi.Input[str]]] secret_id_bound_cidrs: If set,
specifies blocks of IP addresses which can perform the login operation.
:param pulumi.Input[int] secret_id_num_uses: The number of times any particular SecretID
can be used to fetch a token from this AppRole, after which the SecretID will
expire. A value of zero will allow unlimited uses.
:param pulumi.Input[int] secret_id_ttl: The number of seconds after which any SecretID
expires.
:param pulumi.Input[Sequence[pulumi.Input[str]]] token_bound_cidrs: List of CIDR blocks; if set, specifies blocks of IP
addresses which can authenticate successfully, and ties the resulting token to these blocks
as well.
:param pulumi.Input[int] token_explicit_max_ttl: If set, will encode an
[explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
onto the token in number of seconds. This is a hard cap even if `token_ttl` and
`token_max_ttl` would otherwise allow a renewal.
:param pulumi.Input[int] token_max_ttl: The maximum lifetime for generated tokens in number of seconds.
Its current value will be referenced at renewal time.
:param pulumi.Input[bool] token_no_default_policy: If set, the default policy will not be set on
generated tokens; otherwise it will be added to the policies set in token_policies.
:param pulumi.Input[int] token_num_uses: The
[period](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls),
if any, in number of seconds to set on the token.
:param pulumi.Input[int] token_period: If set, indicates that the
token generated using this role should never expire. The token should be renewed within the
duration specified by this value. At each renewal, the token's TTL will be set to the
value of this field. Specified in seconds.
:param pulumi.Input[Sequence[pulumi.Input[str]]] token_policies: List of policies to encode onto generated tokens. Depending
on the auth method, this list may be supplemented by user/group/other values.
:param pulumi.Input[int] token_ttl: The incremental lifetime for generated tokens in number of seconds.
Its current value will be referenced at renewal time.
:param pulumi.Input[str] token_type: The type of token that should be generated. Can be `service`,
`batch`, or `default` to use the mount's tuned default (which unless changed will be
`service` tokens). For token store roles, there are two additional possibilities:
`default-service` and `default-batch` which specify the type to return unless the client
requests a different type at generation time.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _AuthBackendRoleState.__new__(_AuthBackendRoleState)
__props__.__dict__["backend"] = backend
__props__.__dict__["bind_secret_id"] = bind_secret_id
__props__.__dict__["role_id"] = role_id
__props__.__dict__["role_name"] = role_name
__props__.__dict__["secret_id_bound_cidrs"] = secret_id_bound_cidrs
__props__.__dict__["secret_id_num_uses"] = secret_id_num_uses
__props__.__dict__["secret_id_ttl"] = secret_id_ttl
__props__.__dict__["token_bound_cidrs"] = token_bound_cidrs
__props__.__dict__["token_explicit_max_ttl"] = token_explicit_max_ttl
__props__.__dict__["token_max_ttl"] = token_max_ttl
__props__.__dict__["token_no_default_policy"] = token_no_default_policy
__props__.__dict__["token_num_uses"] = token_num_uses
__props__.__dict__["token_period"] = token_period
__props__.__dict__["token_policies"] = token_policies
__props__.__dict__["token_ttl"] = token_ttl
__props__.__dict__["token_type"] = token_type
return AuthBackendRole(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def backend(self) -> pulumi.Output[Optional[str]]:
"""
The unique name of the auth backend to configure.
Defaults to `approle`.
"""
return pulumi.get(self, "backend")
@property
@pulumi.getter(name="bindSecretId")
def bind_secret_id(self) -> pulumi.Output[Optional[bool]]:
"""
Whether or not to require `secret_id` to be
presented when logging in using this AppRole. Defaults to `true`.
"""
return pulumi.get(self, "bind_secret_id")
@property
@pulumi.getter(name="roleId")
def role_id(self) -> pulumi.Output[str]:
"""
The RoleID of this role. If not specified, one will be
auto-generated.
"""
return pulumi.get(self, "role_id")
@property
@pulumi.getter(name="roleName")
def role_name(self) -> pulumi.Output[str]:
"""
The name of the role.
"""
return pulumi.get(self, "role_name")
@property
@pulumi.getter(name="secretIdBoundCidrs")
def secret_id_bound_cidrs(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
If set,
specifies blocks of IP addresses which can perform the login operation.
"""
return pulumi.get(self, "secret_id_bound_cidrs")
@property
@pulumi.getter(name="secretIdNumUses")
def secret_id_num_uses(self) -> pulumi.Output[Optional[int]]:
"""
The number of times any particular SecretID
can be used to fetch a token from this AppRole, after which the SecretID will
expire. A value of zero will allow unlimited uses.
"""
return pulumi.get(self, "secret_id_num_uses")
@property
@pulumi.getter(name="secretIdTtl")
def secret_id_ttl(self) -> pulumi.Output[Optional[int]]:
"""
The number of seconds after which any SecretID
expires.
"""
return pulumi.get(self, "secret_id_ttl")
@property
@pulumi.getter(name="tokenBoundCidrs")
def token_bound_cidrs(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
List of CIDR blocks; if set, specifies blocks of IP
addresses which can authenticate successfully, and ties the resulting token to these blocks
as well.
"""
return pulumi.get(self, "token_bound_cidrs")
@property
@pulumi.getter(name="tokenExplicitMaxTtl")
def token_explicit_max_ttl(self) -> pulumi.Output[Optional[int]]:
"""
If set, will encode an
[explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
onto the token in number of seconds. This is a hard cap even if `token_ttl` and
`token_max_ttl` would otherwise allow a renewal.
"""
return pulumi.get(self, "token_explicit_max_ttl")
@property
@pulumi.getter(name="tokenMaxTtl")
def token_max_ttl(self) -> pulumi.Output[Optional[int]]:
"""
The maximum lifetime for generated tokens in number of seconds.
Its current value will be referenced at renewal time.
"""
return pulumi.get(self, "token_max_ttl")
@property
@pulumi.getter(name="tokenNoDefaultPolicy")
def token_no_default_policy(self) -> pulumi.Output[Optional[bool]]:
"""
If set, the default policy will not be set on
generated tokens; otherwise it will be added to the policies set in token_policies.
"""
return pulumi.get(self, "token_no_default_policy")
@property
@pulumi.getter(name="tokenNumUses")
def token_num_uses(self) -> pulumi.Output[Optional[int]]:
"""
The
[period](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls),
if any, in number of seconds to set on the token.
"""
return pulumi.get(self, "token_num_uses")
@property
@pulumi.getter(name="tokenPeriod")
def token_period(self) -> pulumi.Output[Optional[int]]:
"""
If set, indicates that the
token generated using this role should never expire. The token should be renewed within the
duration specified by this value. At each renewal, the token's TTL will be set to the
value of this field. Specified in seconds.
"""
return pulumi.get(self, "token_period")
@property
@pulumi.getter(name="tokenPolicies")
def token_policies(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
List of policies to encode onto generated tokens. Depending
on the auth method, this list may be supplemented by user/group/other values.
"""
return pulumi.get(self, "token_policies")
@property
@pulumi.getter(name="tokenTtl")
def token_ttl(self) -> pulumi.Output[Optional[int]]:
"""
The incremental lifetime for generated tokens in number of seconds.
Its current value will be referenced at renewal time.
"""
return pulumi.get(self, "token_ttl")
@property
@pulumi.getter(name="tokenType")
def token_type(self) -> pulumi.Output[Optional[str]]:
"""
The type of token that should be generated. Can be `service`,
`batch`, or `default` to use the mount's tuned default (which unless changed will be
`service` tokens). For token store roles, there are two additional possibilities:
`default-service` and `default-batch` which specify the type to return unless the client
requests a different type at generation time.
"""
return pulumi.get(self, "token_type")
| 48.78301
| 145
| 0.655588
| 6,864
| 52,832
| 4.836101
| 0.041375
| 0.083175
| 0.080705
| 0.041753
| 0.948456
| 0.941829
| 0.939358
| 0.934358
| 0.926073
| 0.917879
| 0
| 0.000025
| 0.250379
| 52,832
| 1,082
| 146
| 48.828096
| 0.838148
| 0.419443
| 0
| 0.877395
| 1
| 0
| 0.105385
| 0.023554
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.001916
| 0.009579
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
432e70ecd4a974370eb4b6f99c82e70c1359d7eb
| 1,141
|
py
|
Python
|
tests/unit/models/test_AnsibleHostModel.py
|
sbbroot/epiphany
|
3518244bd2078cf07fa7ff5b021c4cf168c3bd83
|
[
"Apache-2.0"
] | 130
|
2019-01-08T19:03:56.000Z
|
2022-03-11T14:13:37.000Z
|
tests/unit/models/test_AnsibleHostModel.py
|
sbbroot/epiphany
|
3518244bd2078cf07fa7ff5b021c4cf168c3bd83
|
[
"Apache-2.0"
] | 1,925
|
2019-01-09T08:33:44.000Z
|
2022-03-31T14:37:13.000Z
|
tests/unit/models/test_AnsibleHostModel.py
|
sbbroot/epiphany
|
3518244bd2078cf07fa7ff5b021c4cf168c3bd83
|
[
"Apache-2.0"
] | 166
|
2019-01-08T16:03:37.000Z
|
2022-01-09T13:56:33.000Z
|
from typing import List
from cli.models.AnsibleHostModel import AnsibleOrderedHostModel
def test_sort():
"""
Test the `less` operator
"""
EXPECTED_HOSTS: List[AnsibleOrderedHostModel] = [
AnsibleOrderedHostModel('prefix-cluster-service-vm-0', '20.82.14.10'),
AnsibleOrderedHostModel('prefix-cluster-service-vm-1', '20.82.14.34'),
AnsibleOrderedHostModel('prefix-cluster-service-vm-2', '20.82.14.101'),
AnsibleOrderedHostModel('prefix-cluster-service-vm-3', '20.82.14.67'),
AnsibleOrderedHostModel('prefix-cluster-service-vm-4', '20.82.14.11'),
]
unordered_hosts: List[AnsibleOrderedHostModel] = [
AnsibleOrderedHostModel('prefix-cluster-service-vm-4', '20.82.14.11'),
AnsibleOrderedHostModel('prefix-cluster-service-vm-1', '20.82.14.34'),
AnsibleOrderedHostModel('prefix-cluster-service-vm-3', '20.82.14.67'),
AnsibleOrderedHostModel('prefix-cluster-service-vm-0', '20.82.14.10'),
AnsibleOrderedHostModel('prefix-cluster-service-vm-2', '20.82.14.101')
]
unordered_hosts.sort()
assert EXPECTED_HOSTS == unordered_hosts
| 38.033333
| 79
| 0.690622
| 133
| 1,141
| 5.879699
| 0.263158
| 0.370844
| 0.460358
| 0.549872
| 0.774936
| 0.774936
| 0.774936
| 0.774936
| 0.693095
| 0.693095
| 0
| 0.09514
| 0.152498
| 1,141
| 29
| 80
| 39.344828
| 0.713547
| 0.021034
| 0
| 0.421053
| 0
| 0
| 0.346957
| 0.245232
| 0
| 0
| 0
| 0
| 0.052632
| 1
| 0.052632
| true
| 0
| 0.105263
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4a2ed64ad4b0fa88cd4731f5c93a869d2aef68cd
| 145
|
py
|
Python
|
escalate/core/models/base_classes/__init__.py
|
darkreactions/ESCALATE
|
0020da00b81a2dd80d1c9fd72d2edf92b519e605
|
[
"MIT"
] | 11
|
2020-09-29T13:59:02.000Z
|
2022-03-23T04:57:52.000Z
|
escalate/core/models/base_classes/__init__.py
|
darkreactions/ESCALATE
|
0020da00b81a2dd80d1c9fd72d2edf92b519e605
|
[
"MIT"
] | 95
|
2019-11-18T20:10:49.000Z
|
2022-03-31T17:09:49.000Z
|
escalate/core/models/base_classes/__init__.py
|
darkreactions/ESCALATE
|
0020da00b81a2dd80d1c9fd72d2edf92b519e605
|
[
"MIT"
] | 2
|
2021-11-26T18:22:08.000Z
|
2022-03-31T11:57:10.000Z
|
from .chemistry_base_class import *
from .organization_base_class import *
from .workflow_base_class import *
from .abstract_base_models import *
| 36.25
| 38
| 0.841379
| 20
| 145
| 5.7
| 0.45
| 0.236842
| 0.394737
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 145
| 4
| 39
| 36.25
| 0.876923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4a540d2f1143355b1b5853f504522e690caec5d5
| 5,850
|
py
|
Python
|
Network.py
|
lixiaopeng123456/WDnCNN
|
708ec43176fbe75f8fdb4c7a4669dd6c2b579bb0
|
[
"MIT"
] | 1
|
2021-09-26T03:06:33.000Z
|
2021-09-26T03:06:33.000Z
|
Network.py
|
lixiaopeng123456/WDnCNN
|
708ec43176fbe75f8fdb4c7a4669dd6c2b579bb0
|
[
"MIT"
] | null | null | null |
Network.py
|
lixiaopeng123456/WDnCNN
|
708ec43176fbe75f8fdb4c7a4669dd6c2b579bb0
|
[
"MIT"
] | 2
|
2021-01-21T02:05:05.000Z
|
2021-03-18T12:54:17.000Z
|
import torch.nn as nn
import torch.nn.init as init
from cfg import par
class Denoising_Net_gray(nn.Module):
def __init__(self, depth=15, input_channel=par.input_channel, n_channel=72, output_channel=par.output_channel):
super(Denoising_Net_gray, self).__init__()
layers = []
for _ in range(depth):
layers.append(
nn.Conv2d(n_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.Conv2d(n_channel, output_channel, kernel_size=(3, 3), padding=(1, 1), bias=False))
self.denoisingNet = nn.Sequential(*layers)
self.InputNet0 = nn.Sequential(
nn.Conv2d(input_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(n_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(n_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True)
)
self.InputNet1 = nn.Sequential(
nn.Conv2d(input_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(n_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True)
)
self.InputNet2 = nn.Sequential(
nn.Conv2d(input_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(n_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True)
)
self.InputNet3 = nn.Sequential(
nn.Conv2d(input_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True)
)
self._initialize_weights()
def forward(self, x):
x0 = self.InputNet0(x[:, [0, 4], :, :])
x1 = self.InputNet1(x[:, [1, 4], :, :])
x2 = self.InputNet2(x[:, [2, 4], :, :])
x3 = self.InputNet3(x[:, [3, 4], :, :])
x = x0 + x1 + x2 + x3
z = self.denoisingNet(x)
return x[:, 0:4, :, :] - z
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
# init.xavier_uniform_(m.weight)
init.kaiming_normal_(m.weight, a=0, mode='fan_in')
# init.orthogonal_(m.weight)
if m.bias is not None:
init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
init.constant_(m.weight, 1)
init.constant_(m.bias, 0)
class Denoising_Net_color(nn.Module):
def __init__(self, depth=12, input_channel=par.input_channel, n_channel=108, output_channel=par.output_channel):
super(Denoising_Net_color, self).__init__()
layers = []
for _ in range(depth):
layers.append(
nn.Conv2d(n_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.Conv2d(n_channel, output_channel, kernel_size=(3, 3), padding=(1, 1), bias=False))
self.denoisingNet = nn.Sequential(*layers)
self.InputNet0 = nn.Sequential(
nn.Conv2d(input_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(n_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(n_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True)
)
self.InputNet1 = nn.Sequential(
nn.Conv2d(input_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(n_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True)
)
self.InputNet2 = nn.Sequential(
nn.Conv2d(input_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(n_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True)
)
self.InputNet3 = nn.Sequential(
nn.Conv2d(input_channel, n_channel, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True),
nn.ReLU(inplace=True)
)
self._initialize_weights()
def forward(self, x):
x0 = self.InputNet0(x[:, [0, 4, 8, 12], :, :])
x1 = self.InputNet1(x[:, [1, 5, 9, 12], :, :])
x2 = self.InputNet2(x[:, [2, 6, 10, 12], :, :])
x3 = self.InputNet3(x[:, [3, 7, 11, 12], :, :])
x = x0 + x1 + x2 + x3
z = self.denoisingNet(x)
return x[:, 0:12, :, :] - z
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
# init.xavier_uniform_(m.weight)
init.kaiming_normal_(m.weight, a=0, mode='fan_in')
# init.orthogonal_(m.weight)
if m.bias is not None:
init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
init.constant_(m.weight, 1)
init.constant_(m.bias, 0)
| 44.656489
| 117
| 0.545641
| 781
| 5,850
| 3.930858
| 0.112676
| 0.024756
| 0.09772
| 0.117264
| 0.954397
| 0.921173
| 0.905537
| 0.882736
| 0.852769
| 0.852769
| 0
| 0.054435
| 0.296581
| 5,850
| 130
| 118
| 45
| 0.691616
| 0.019658
| 0
| 0.747664
| 0
| 0
| 0.002143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056075
| false
| 0
| 0.028037
| 0
| 0.121495
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a781c3da06009be0cc6aca58e7821288420db19
| 5,157
|
py
|
Python
|
src/genie/libs/parser/iosxr/tests/ShowIpv4VrfAllInterface/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxr/tests/ShowIpv4VrfAllInterface/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxr/tests/ShowIpv4VrfAllInterface/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
expected_output = {
"GigabitEthernet0/0/0/0": {
"int_status": "up",
"ipv4": {
"10.1.3.1/24": {"ip": "10.1.3.1", "prefix_length": "24"},
"broadcast_forwarding": "disabled",
"icmp_redirects": "never sent",
"icmp_replies": "never sent",
"icmp_unreachables": "always sent",
"mtu": 1514,
"mtu_available": 1500,
"proxy_arp": "disabled",
"table_id": "0xe0000000",
},
"multicast_groups": [
"224.0.0.2",
"224.0.0.1",
"224.0.0.2",
"224.0.0.5",
"224.0.0.6",
],
"oper_status": "up",
"vrf": "default",
"vrf_id": "0x60000000",
},
"GigabitEthernet0/0/0/1": {
"int_status": "up",
"ipv4": {
"10.1.5.1/24": {"ip": "10.1.5.1", "prefix_length": "24", "route_tag": 50},
"10.2.2.2/24": {"ip": "10.2.2.2", "prefix_length": "24", "secondary": True},
"broadcast_forwarding": "disabled",
"icmp_redirects": "never sent",
"icmp_replies": "never sent",
"icmp_unreachables": "always sent",
"mtu": 1514,
"mtu_available": 1500,
"proxy_arp": "disabled",
"table_id": "0xe0000010",
},
"multicast_groups": ["224.0.0.2", "224.0.0.1"],
"oper_status": "up",
"vrf": "VRF1",
"vrf_id": "0x60000001",
},
"GigabitEthernet0/0/0/2": {
"int_status": "up",
"ipv4": {
"10.186.5.1/24": {"ip": "10.186.5.1", "prefix_length": "24"},
"broadcast_forwarding": "disabled",
"icmp_redirects": "never sent",
"icmp_replies": "never sent",
"icmp_unreachables": "always sent",
"mtu": 1514,
"mtu_available": 1500,
"proxy_arp": "disabled",
"table_id": "0xe0000011",
},
"multicast_groups": ["224.0.0.2", "224.0.0.1"],
"oper_status": "up",
"vrf": "VRF2",
"vrf_id": "0x60000002",
},
"GigabitEthernet0/0/0/3": {
"int_status": "up",
"ipv4": {
"10.1.2.1/24": {"ip": "10.1.2.1", "prefix_length": "24"},
"broadcast_forwarding": "disabled",
"icmp_redirects": "never sent",
"icmp_replies": "never sent",
"icmp_unreachables": "always sent",
"mtu": 1514,
"mtu_available": 1500,
"proxy_arp": "disabled",
"table_id": "0xe0000000",
},
"multicast_groups": [
"224.0.0.2",
"224.0.0.1",
"224.0.0.2",
"224.0.0.5",
"224.0.0.6",
],
"oper_status": "up",
"vrf": "default",
"vrf_id": "0x60000000",
},
"GigabitEthernet0/0/0/4": {
"int_status": "up",
"ipv4": {
"10.69.111.111/32": {"ip": "10.69.111.111", "prefix_length": "32"},
"broadcast_forwarding": "disabled",
"icmp_redirects": "never sent",
"icmp_replies": "never sent",
"icmp_unreachables": "always sent",
"mtu": 1514,
"mtu_available": 1500,
"proxy_arp": "disabled",
"table_id": "0xe0000000",
"unnumbered": {"unnumbered_intf_ref": "Loopback11"},
},
"multicast_groups": ["224.0.0.2", "224.0.0.1"],
"oper_status": "up",
"vrf": "default",
"vrf_id": "0x60000000",
},
"GigabitEthernet0/0/0/5": {
"int_status": "shutdown",
"oper_status": "down",
"vrf": "default",
"vrf_id": "0x60000000",
},
"GigabitEthernet0/0/0/6": {
"int_status": "shutdown",
"oper_status": "down",
"vrf": "default",
"vrf_id": "0x60000000",
},
"Loopback0": {
"int_status": "up",
"ipv4": {
"10.4.1.1/32": {"ip": "10.4.1.1", "prefix_length": "32"},
"broadcast_forwarding": "disabled",
"icmp_redirects": "never sent",
"icmp_replies": "never sent",
"icmp_unreachables": "always sent",
"mtu": 1500,
"mtu_available": 1500,
"proxy_arp": "disabled",
"table_id": "0xe0000000",
},
"oper_status": "up",
"vrf": "default",
"vrf_id": "0x60000000",
},
"Loopback11": {
"int_status": "up",
"ipv4": {
"10.69.111.111/32": {"ip": "10.69.111.111", "prefix_length": "32"},
"broadcast_forwarding": "disabled",
"icmp_redirects": "never sent",
"icmp_replies": "never sent",
"icmp_unreachables": "always sent",
"mtu": 1500,
"mtu_available": 1500,
"proxy_arp": "disabled",
"table_id": "0xe0000000",
},
"oper_status": "up",
"vrf": "default",
"vrf_id": "0x60000000",
},
"MgmtEth0/0/CPU0/0": {
"int_status": "shutdown",
"oper_status": "down",
"vrf": "default",
"vrf_id": "0x60000000",
},
}
| 32.433962
| 88
| 0.453364
| 523
| 5,157
| 4.286807
| 0.131931
| 0.021409
| 0.035682
| 0.053524
| 0.876896
| 0.839429
| 0.815343
| 0.815343
| 0.807315
| 0.807315
| 0
| 0.143924
| 0.350591
| 5,157
| 158
| 89
| 32.639241
| 0.52553
| 0
| 0
| 0.734177
| 0
| 0
| 0.467326
| 0.029862
| 0
| 0
| 0.032965
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a951ecefde24f3bd89061464cfa4dc2bc81af2b
| 129
|
py
|
Python
|
patch_classifier/classifier/utils.py
|
taebinkim7/tma-classifier
|
686b1858e3b2d2e17bab95a963f6baacf302456e
|
[
"MIT"
] | null | null | null |
patch_classifier/classifier/utils.py
|
taebinkim7/tma-classifier
|
686b1858e3b2d2e17bab95a963f6baacf302456e
|
[
"MIT"
] | null | null | null |
patch_classifier/classifier/utils.py
|
taebinkim7/tma-classifier
|
686b1858e3b2d2e17bab95a963f6baacf302456e
|
[
"MIT"
] | null | null | null |
import os
from joblib import dump
def save_clf_dataset(dataset, fpath, compress=3):
dump(dataset, fpath, compress=compress)
| 21.5
| 49
| 0.775194
| 19
| 129
| 5.157895
| 0.631579
| 0.244898
| 0.408163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009009
| 0.139535
| 129
| 5
| 50
| 25.8
| 0.873874
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
434a88523926344daf9a95355a842d17cfedb958
| 10,450
|
py
|
Python
|
test/integrationtests/test_integration_test_single_slice.py
|
philippgualdi/PyQMRI
|
5de3a7da5feb2d01b746acd47d1dba91a8a1417e
|
[
"Apache-2.0"
] | null | null | null |
test/integrationtests/test_integration_test_single_slice.py
|
philippgualdi/PyQMRI
|
5de3a7da5feb2d01b746acd47d1dba91a8a1417e
|
[
"Apache-2.0"
] | null | null | null |
test/integrationtests/test_integration_test_single_slice.py
|
philippgualdi/PyQMRI
|
5de3a7da5feb2d01b746acd47d1dba91a8a1417e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 12 11:26:41 2019
@author: omaier
"""
import pytest
import os
from os.path import join as pjoin
import pyqmri
import shutil
import h5py
import numpy as np
data_dir = os.path.realpath(pjoin(os.path.dirname(__file__), '..'))
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV():
assert pyqmri.run(data=pjoin(data_dir, 'smalltest.h5'),
model='VFA',
config=pjoin(data_dir, 'default.ini')
) is None
@pytest.mark.integration_test
def test_VFA_model_imagespace_TGV():
assert pyqmri.run(data=pjoin(data_dir, 'smalltest.h5'),
model='VFA',
imagespace=True,
config=pjoin(data_dir, 'default.ini')
) is None
@pytest.mark.integration_test
def test_General_model_kspace_TGV():
assert pyqmri.run(data=pjoin(data_dir, 'smalltest.h5'),
config=pjoin(data_dir, 'default.ini'),
modelfile=pjoin(data_dir, 'models.ini')
) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TV():
assert pyqmri.run(data=pjoin(data_dir, 'smalltest.h5'),
model='VFA',
config=pjoin(data_dir, 'default.ini'),
reg_type='TV'
) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_cart():
assert pyqmri.run(data=pjoin(data_dir, 'VFA_cart_smalltest.h5'),
model='VFA',
config=pjoin(data_dir, 'default.ini'),
trafo=False,
) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_cart_imageguess_CG(
gen_noimageguess):
assert pyqmri.run(data=pjoin(data_dir, 'VFA_cart_test_imageguess.h5'),
model='VFA',
config=pjoin(data_dir, 'default.ini'),
trafo=False,
) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_cart_imageguess(
gen_noimageguess):
assert pyqmri.run(data=pjoin(data_dir, 'VFA_cart_test_imageguess.h5'),
model='VFA',
config=pjoin(data_dir, 'default.ini'),
trafo=False,
useCGguess=False,
) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_cart_coilguess(
gen_data_nocoils):
assert pyqmri.run(data=pjoin(data_dir, 'VFA_cart_test_coilguess.h5'),
model='VFA',
config=pjoin(data_dir, 'default.ini'),
trafo=False,
useCGguess=False,
) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_cart_coilguess_radial(
gen_data_nocoils_radial):
assert pyqmri.run(
data=pjoin(data_dir, 'VFA_radial_test_coilguess.h5'),
model='VFA',
config=pjoin(data_dir, 'default.ini'),
trafo=True,
useCGguess=False,
) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_double():
assert pyqmri.run(data=pjoin(data_dir, 'smalltest.h5'),
model='VFA',
double_precision=True,
config=pjoin(data_dir, 'default.ini')
) is None
@pytest.mark.integration_test
def test_VFA_model_imagespace_TGV_double():
assert pyqmri.run(data=pjoin(data_dir, 'smalltest.h5'),
model='VFA',
imagespace=True,
double_precision=True,
config=pjoin(data_dir, 'default.ini')
) is None
@pytest.mark.integration_test
def test_General_model_kspace_TGV_double():
assert pyqmri.run(data=pjoin(data_dir, 'smalltest.h5'),
config=pjoin(data_dir, 'default.ini'),
modelfile=pjoin(data_dir, 'models.ini'),
double_precision=True,
) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TV_double():
    """TV-regularized VFA fit in double precision."""
    settings = {
        'data': pjoin(data_dir, 'smalltest.h5'),
        'model': 'VFA',
        'config': pjoin(data_dir, 'default.ini'),
        'reg_type': 'TV',
        'double_precision': True,
    }
    # pyqmri.run signals success by returning None.
    assert pyqmri.run(**settings) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_cart_double():
    """Cartesian TGV reconstruction in double precision."""
    settings = {
        'data': pjoin(data_dir, 'VFA_cart_smalltest.h5'),
        'model': 'VFA',
        'config': pjoin(data_dir, 'default.ini'),
        'trafo': False,
        'double_precision': True,
    }
    # pyqmri.run signals success by returning None.
    assert pyqmri.run(**settings) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_cart_imageguess_CG_double(
        gen_noimageguess):
    """Cartesian TGV, CG image guess, double precision."""
    settings = {
        'data': pjoin(data_dir, 'VFA_cart_test_imageguess.h5'),
        'model': 'VFA',
        'config': pjoin(data_dir, 'default.ini'),
        'trafo': False,
        'double_precision': True,
    }
    # pyqmri.run signals success by returning None.
    assert pyqmri.run(**settings) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_cart_imageguess_double(
        gen_noimageguess):
    """Cartesian TGV, no CG image guess, double precision."""
    settings = {
        'data': pjoin(data_dir, 'VFA_cart_test_imageguess.h5'),
        'model': 'VFA',
        'config': pjoin(data_dir, 'default.ini'),
        'trafo': False,
        'useCGguess': False,
        'double_precision': True,
    }
    # pyqmri.run signals success by returning None.
    assert pyqmri.run(**settings) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_cart_coilguess_double(
        gen_data_nocoils):
    """Cartesian TGV with coil estimation in double precision."""
    settings = {
        'data': pjoin(data_dir, 'VFA_cart_test_coilguess.h5'),
        'model': 'VFA',
        'config': pjoin(data_dir, 'default.ini'),
        'trafo': False,
        'useCGguess': False,
        'double_precision': True,
    }
    # pyqmri.run signals success by returning None.
    assert pyqmri.run(**settings) is None
@pytest.mark.integration_test
def test_VFA_model_kspace_TGV_cart_coilguess_radial_double(
        gen_data_nocoils_radial):
    """Radial TGV with coil estimation in double precision."""
    settings = {
        'data': pjoin(data_dir, 'VFA_radial_test_coilguess.h5'),
        'model': 'VFA',
        'config': pjoin(data_dir, 'default.ini'),
        'trafo': True,
        'useCGguess': False,
        'double_precision': True,
    }
    # pyqmri.run signals success by returning None.
    assert pyqmri.run(**settings) is None
@pytest.fixture(scope="function")
def gen_noimageguess():
    """Write a Cartesian VFA test file used by the image-guess tests.

    Copies the small Cartesian reference data set (optionally repeating
    it along the slice axis) into ``VFA_cart_test_imageguess.h5``.
    The file is removed again by the session-scoped ``clean_up`` fixture.
    """
    slices = 1
    # Context managers guarantee the HDF5 handles are closed even when an
    # intermediate step raises; the original code leaked both handles in
    # that case.
    with h5py.File(pjoin(data_dir, 'VFA_cart_smalltest.h5'), 'r') as fin:
        coils = np.repeat(fin["Coils"][()], repeats=slices, axis=1)
        real_dat = np.repeat(fin["real_dat"][()], repeats=slices, axis=2)
        imag_dat = np.repeat(fin["imag_dat"][()], repeats=slices, axis=2)
        fa_corr = np.repeat(fin["fa_corr"][()], repeats=slices, axis=0)
        image_dimensions = fin.attrs["image_dimensions"]
        fa = fin.attrs["fa"][()]
        tr = fin.attrs["TR"]
    # The slice dimension has to reflect the repetition above.
    image_dimensions[2] = slices
    with h5py.File(pjoin(data_dir,
                         'VFA_cart_test_imageguess.h5'), 'w') as fout:
        fout["Coils"] = coils
        fout["real_dat"] = real_dat
        fout["imag_dat"] = imag_dat
        fout["fa_corr"] = fa_corr
        fout.attrs["TR"] = tr
        fout.attrs["fa"] = fa
        fout.attrs["flip_angle(s)"] = fa
        fout.attrs["image_dimensions"] = image_dimensions
@pytest.fixture(scope="function")
def gen_data_nocoils():
    """Write a Cartesian VFA test file without coil sensitivity profiles.

    Copies the small Cartesian reference data set (minus the ``Coils``
    dataset) into ``VFA_cart_test_coilguess.h5`` so pyqmri has to
    estimate the coil profiles itself.  Removed by ``clean_up``.
    """
    slices = 1
    # Context managers guarantee the HDF5 handles are closed even when an
    # intermediate step raises; the original code leaked both handles in
    # that case.
    with h5py.File(pjoin(data_dir, 'VFA_cart_smalltest.h5'), 'r') as fin:
        real_dat = np.repeat(fin["real_dat"][()], repeats=slices, axis=2)
        imag_dat = np.repeat(fin["imag_dat"][()], repeats=slices, axis=2)
        fa_corr = np.repeat(fin["fa_corr"][()], repeats=slices, axis=0)
        image_dimensions = fin.attrs["image_dimensions"]
        fa = fin.attrs["fa"][()]
        tr = fin.attrs["TR"]
    # The slice dimension has to reflect the repetition above.
    image_dimensions[2] = slices
    with h5py.File(pjoin(data_dir,
                         'VFA_cart_test_coilguess.h5'), 'w') as fout:
        fout["real_dat"] = real_dat
        fout["imag_dat"] = imag_dat
        fout["fa_corr"] = fa_corr
        fout.attrs["TR"] = tr
        fout.attrs["fa"] = fa
        fout.attrs["flip_angle(s)"] = fa
        fout.attrs["image_dimensions"] = image_dimensions
@pytest.fixture(scope="function")
def gen_data_nocoils_radial():
    """Write a radial VFA test file without coil sensitivity profiles.

    Copies the radial reference data set (including its trajectory and
    DCF flag, but no ``Coils`` dataset) into
    ``VFA_radial_test_coilguess.h5``.  Removed by ``clean_up``.
    """
    slices = 1
    # Context managers guarantee the HDF5 handles are closed even when an
    # intermediate step raises; the original code leaked both handles in
    # that case.
    with h5py.File(pjoin(data_dir, 'smalltest.h5'), 'r') as fin:
        real_dat = np.repeat(fin["real_dat"][()], repeats=slices, axis=2)
        imag_dat = np.repeat(fin["imag_dat"][()], repeats=slices, axis=2)
        fa_corr = np.repeat(fin["fa_corr"][()], repeats=slices, axis=0)
        real_traj = fin["real_traj"][()]
        imag_traj = fin["imag_traj"][()]
        dcf_norm = fin.attrs["data_normalized_with_dcf"]
        image_dimensions = fin.attrs["image_dimensions"]
        fa = fin.attrs["fa"][()]
        tr = fin.attrs["TR"]
    # The slice dimension has to reflect the repetition above.
    image_dimensions[2] = slices
    with h5py.File(pjoin(data_dir,
                         'VFA_radial_test_coilguess.h5'), 'w') as fout:
        fout["real_dat"] = real_dat
        fout["imag_dat"] = imag_dat
        fout["real_traj"] = real_traj
        fout["imag_traj"] = imag_traj
        fout["fa_corr"] = fa_corr
        fout.attrs["TR"] = tr
        fout.attrs["fa"] = fa
        fout.attrs["flip_angle(s)"] = fa
        fout.attrs["image_dimensions"] = image_dimensions
        fout.attrs["data_normalized_with_dcf"] = dcf_norm
@pytest.fixture(autouse=True, scope="session")
def clean_up():
    """Remove every artefact created during the integration-test session."""
    yield
    generated_files = ('VFA_cart_test_imageguess.h5',
                       'VFA_cart_test_coilguess.h5',
                       'VFA_radial_test_coilguess.h5')
    try:
        out_dir = pjoin(data_dir, 'PyQMRI_out')
        if os.path.exists(out_dir):
            shutil.rmtree(out_dir)
        for name in generated_files:
            target = pjoin(data_dir, name)
            if os.path.isfile(target):
                os.remove(target)
    except OSError as e:
        print("Error: %s - %s." % (e.filename, e.strerror))
| 32.153846
| 78
| 0.602105
| 1,318
| 10,450
| 4.477997
| 0.094082
| 0.06286
| 0.105727
| 0.053372
| 0.913419
| 0.894781
| 0.88631
| 0.880549
| 0.861742
| 0.837004
| 0
| 0.008853
| 0.275789
| 10,450
| 324
| 79
| 32.253086
| 0.77101
| 0.009187
| 0
| 0.76
| 0
| 0
| 0.133978
| 0.057129
| 0
| 0
| 0
| 0
| 0.072
| 1
| 0.088
| false
| 0
| 0.028
| 0
| 0.116
| 0.004
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
437a93df76e8f128afd9e0c239b88f9dc9715424
| 48,913
|
py
|
Python
|
tests/test_text_extraction.py
|
MUSC-TBIC/etude-engine
|
943608ae3458bfcecc5e1c0b24fb3aa5c8bc0cad
|
[
"Apache-2.0"
] | 9
|
2018-11-03T20:49:41.000Z
|
2021-10-30T23:11:28.000Z
|
tests/test_text_extraction.py
|
MUSC-TBIC/etude-engine
|
943608ae3458bfcecc5e1c0b24fb3aa5c8bc0cad
|
[
"Apache-2.0"
] | 1
|
2019-06-04T17:17:41.000Z
|
2019-06-04T17:17:41.000Z
|
tests/test_text_extraction.py
|
MUSC-TBIC/etude-engine
|
943608ae3458bfcecc5e1c0b24fb3aa5c8bc0cad
|
[
"Apache-2.0"
] | null | null | null |
import os
import tempfile
import json
import args_and_configs
import text_extraction
#############################################
## Test extracting various plaintext patterns
#############################################
def test_plaintext_split_on_spaces():
    """Space-delimited tokenization yields one Token annotation per word."""
    raw_content = 'Hello , world'
    strict_starts = text_extraction.extract_annotations_plaintext(
        offset_mapping={},
        raw_content=raw_content,
        delimiter=' ',
        tag_name='Token')
    expected_output = {
        '0': [{'type': 'Token', 'begin_pos': '0', 'end_pos': '5',
               'raw_text': 'Hello'}],
        '6': [{'type': 'Token', 'begin_pos': '6', 'end_pos': '7',
               'raw_text': ','}],
        '8': [{'type': 'Token', 'begin_pos': '8', 'end_pos': '13',
               'raw_text': 'world'}],
    }
    assert strict_starts == expected_output
def test_plaintext_split_on_spaces_with_final_space():
    """A trailing delimiter does not create an extra (empty) token."""
    raw_content = 'Hello , world '
    strict_starts = text_extraction.extract_annotations_plaintext(
        offset_mapping={},
        raw_content=raw_content,
        delimiter=' ',
        tag_name='Token')
    expected_output = {
        '0': [{'type': 'Token', 'begin_pos': '0', 'end_pos': '5',
               'raw_text': 'Hello'}],
        '6': [{'type': 'Token', 'begin_pos': '6', 'end_pos': '7',
               'raw_text': ','}],
        '8': [{'type': 'Token', 'begin_pos': '8', 'end_pos': '13',
               'raw_text': 'world'}],
    }
    assert strict_starts == expected_output
def test_plaintext_split_on_spaces_with_initial_space():
    """Leading delimiters shift all token offsets accordingly."""
    raw_content = '  Hello , world'
    strict_starts = text_extraction.extract_annotations_plaintext(
        offset_mapping={},
        raw_content=raw_content,
        delimiter=' ',
        tag_name='Token')
    expected_output = {
        '2': [{'type': 'Token', 'begin_pos': '2', 'end_pos': '7',
               'raw_text': 'Hello'}],
        '8': [{'type': 'Token', 'begin_pos': '8', 'end_pos': '9',
               'raw_text': ','}],
        '10': [{'type': 'Token', 'begin_pos': '10', 'end_pos': '15',
                'raw_text': 'world'}],
    }
    assert strict_starts == expected_output
#############################################
## Test extracting various xml patterns
#############################################
def test_extracting_datetime_from_0005_gs():
    """Both DATE tags of the i2b2 reference file become DateTime annots."""
    ingest_file = 'tests/data/i2b2_2016_track-1_reference/0005_gs.xml'
    strict_starts = text_extraction.extract_annotations_xml(
        ingest_file,
        offset_mapping={},
        annotation_path='./TAGS/DATE',
        tag_name='DateTime',
        begin_attribute='start',
        end_attribute='end')
    expected_output = {
        '2404': [{'type': 'DateTime', 'begin_pos': '2404',
                  'end_pos': '2410', 'raw_text': None}],
        '87': [{'type': 'DateTime', 'begin_pos': '87',
                'end_pos': '97', 'raw_text': None}],
    }
    assert strict_starts == expected_output
def test_default_namespace_same_as_empty():
    """Passing namespaces={} is equivalent to omitting the argument."""
    ingest_file = 'tests/data/i2b2_2016_track-1_reference/0005_gs.xml'
    shared_kwargs = dict(offset_mapping={},
                         annotation_path='./TAGS/DATE',
                         tag_name='DateTime',
                         begin_attribute='start',
                         end_attribute='end')
    strict_starts_default = text_extraction.extract_annotations_xml(
        ingest_file, **shared_kwargs)
    strict_starts_empty = text_extraction.extract_annotations_xml(
        ingest_file, namespaces={}, **shared_kwargs)
    assert strict_starts_default == strict_starts_empty
def test_extracting_sentences_from_0005_gs():
    """The i2b2 reference file contains no UIMA Sentence annotations."""
    ingest_file = 'tests/data/i2b2_2016_track-1_reference/0005_gs.xml'
    config_file = 'config/uima_sentences.conf'
    namespaces, document_data, patterns = args_and_configs.process_config(
        config_file=config_file,
        score_key='Short Name',
        score_values=['.*'])
    strict_starts = text_extraction.extract_annotations_xml(
        ingest_file,
        offset_mapping={},
        namespaces=namespaces,
        annotation_path='.//type:Sentence',
        tag_name='Sentence',
        begin_attribute='begin',
        end_attribute='end')
    assert strict_starts == {}
def test_extracting_sentences_from_reference_standard():
    """The reference-standard XMI contains exactly 113 sentences."""
    ingest_file = 'tests/data/sentences/992321.sentences.xmi'
    config_file = 'config/uima_sentences.conf'
    namespaces, document_data, patterns = args_and_configs.process_config(
        config_file=config_file,
        score_key='Short Name',
        score_values=['.*'])
    strict_starts = text_extraction.extract_annotations_xml(
        ingest_file,
        offset_mapping={},
        namespaces=namespaces,
        annotation_path='.//type4:Sentence',
        tag_name='Sentence',
        begin_attribute='begin',
        end_attribute='end')
    assert len(strict_starts) == 113
def test_extracting_sentences_from_WebAnno():
    """The WebAnno export contains exactly 45 sentences."""
    ingest_file = 'tests/data/sentences/992321.sentences.WebAnno.xmi'
    config_file = 'config/uima_sentences.conf'
    namespaces, document_data, patterns = args_and_configs.process_config(
        config_file=config_file,
        score_key='Short Name',
        score_values=['.*'])
    strict_starts = text_extraction.extract_annotations_xml(
        ingest_file,
        offset_mapping={},
        namespaces=namespaces,
        annotation_path='.//type4:Sentence',
        tag_name='Sentence',
        begin_attribute='begin',
        end_attribute='end')
    assert len(strict_starts) == 45
def test_extracting_sentences_from_CTAKES4_OpenNLP1_8():
    """The cTAKES 4 / OpenNLP 1.8 output contains exactly 82 sentences."""
    ingest_file = 'tests/data/sentences/992321-OUT.xmi'
    config_file = 'config/uima_sentences.conf'
    namespaces, document_data, patterns = args_and_configs.process_config(
        config_file=config_file,
        score_key='Short Name',
        score_values=['.*'])
    strict_starts = text_extraction.extract_annotations_xml(
        ingest_file,
        offset_mapping={},
        namespaces=namespaces,
        annotation_path='.//type:Sentence',
        tag_name='Sentence',
        begin_attribute='begin',
        end_attribute='end')
    assert len(strict_starts) == 82
## Passing attributes through
def test_extracting_no_optional_attributes():
    """With optional_attributes=[] only the core annotation keys appear."""
    ingest_file = 'tests/data/013_Conditional_Problem.xmi'
    config_file = 'config/webanno_problems_allergies_xmi.conf'
    namespaces, document_data, patterns = args_and_configs.process_config(
        config_file=config_file,
        score_key='Short Name',
        score_values=['.*'])
    strict_starts = text_extraction.extract_annotations_xml(
        ingest_file,
        offset_mapping={},
        annotation_path='./custom:Problems',
        tag_name='Problem',
        namespaces=namespaces,
        begin_attribute='begin',
        end_attribute='end',
        optional_attributes=[])
    expected_output = {
        '181': [{'type': 'Problem', 'begin_pos': '181',
                 'end_pos': '188', 'raw_text': None}],
        '218': [{'type': 'Problem', 'begin_pos': '218',
                 'end_pos': '224', 'raw_text': None}],
    }
    assert strict_starts == expected_output
def test_extracting_with_and_without_optional_attributes():
    """Optional attributes appear in the output only when requested."""
    ingest_file = 'tests/data/013_Conditional_Problem.xmi'
    config_file = 'config/webanno_problems_allergies_xmi.conf'
    namespaces, document_data, patterns = args_and_configs.process_config(
        config_file=config_file,
        score_key='Short Name',
        score_values=['.*'])
    shared_kwargs = dict(offset_mapping={},
                         annotation_path='./custom:Problems',
                         tag_name='Problem',
                         namespaces=namespaces,
                         begin_attribute='begin',
                         end_attribute='end')
    strict_starts_no_opt_attributes = text_extraction.extract_annotations_xml(
        ingest_file,
        optional_attributes=[],
        **shared_kwargs)
    strict_starts_with_opt_attributes = \
        text_extraction.extract_annotations_xml(
            ingest_file,
            optional_attributes=patterns[0]['optional_attributes'],
            **shared_kwargs)
    # Core keys shared by both expected outputs.
    base_181 = {'type': 'Problem', 'begin_pos': '181',
                'end_pos': '188', 'raw_text': None}
    base_218 = {'type': 'Problem', 'begin_pos': '218',
                'end_pos': '224', 'raw_text': None}
    expected_output_no_opt_attributes = {'181': [dict(base_181)],
                                         '218': [dict(base_218)]}
    flags_181 = {'conditional': 'true', 'generic': 'false',
                 'historical': 'false', 'negated': 'false',
                 'not_patient': 'true', 'uncertain': 'false'}
    flags_218 = {'conditional': 'false', 'generic': 'false',
                 'historical': 'true', 'negated': 'false',
                 'not_patient': 'false', 'uncertain': 'true'}
    expected_output_with_opt_attributes = {
        '181': [dict(base_181, **flags_181)],
        '218': [dict(base_218, **flags_218)],
    }
    assert strict_starts_no_opt_attributes == \
        expected_output_no_opt_attributes
    assert strict_starts_with_opt_attributes == \
        expected_output_with_opt_attributes
    assert strict_starts_no_opt_attributes != \
        expected_output_with_opt_attributes
    assert strict_starts_with_opt_attributes != \
        expected_output_no_opt_attributes
def test_extracting_with_and_without_optional_attributes_called_by_parent():
    """extract_annotations() honors per-pattern optional_attributes."""
    ingest_file = 'tests/data/013_Conditional_Problem.xmi'
    config_file = 'config/webanno_problems_allergies_xmi.conf'
    namespaces, document_data, patterns = args_and_configs.process_config(
        config_file=config_file,
        score_key='Short Name',
        score_values=['.*'])
    # Drop the last configured pattern; only the first one is exercised.
    patterns.pop()
    shared_kwargs = dict(namespaces=namespaces,
                         document_data=document_data,
                         patterns=patterns,
                         skip_chars=None,
                         out_file=None)
    offset_mapping, annots_with_opt_attributes = \
        text_extraction.extract_annotations(ingest_file, **shared_kwargs)
    # Clearing the pattern's optional attributes and re-running must
    # strip the extra keys from the output.
    patterns[0]['optional_attributes'] = []
    offset_mapping, annots_without_opt_attributes = \
        text_extraction.extract_annotations(ingest_file, **shared_kwargs)
    base_181 = {'type': 'Problem', 'begin_pos': '181',
                'end_pos': '188', 'raw_text': None}
    base_218 = {'type': 'Problem', 'begin_pos': '218',
                'end_pos': '224', 'raw_text': None}
    expected_output_without_opt_attributes = {'181': [dict(base_181)],
                                              '218': [dict(base_218)]}
    flags_181 = {'conditional': 'true', 'generic': 'false',
                 'historical': 'false', 'negated': 'false',
                 'not_patient': 'true', 'uncertain': 'false'}
    flags_218 = {'conditional': 'false', 'generic': 'false',
                 'historical': 'true', 'negated': 'false',
                 'not_patient': 'false', 'uncertain': 'true'}
    expected_output_with_opt_attributes = {
        '181': [dict(base_181, **flags_181)],
        '218': [dict(base_218, **flags_218)],
    }
    assert annots_with_opt_attributes == \
        expected_output_with_opt_attributes
    assert annots_without_opt_attributes == \
        expected_output_without_opt_attributes
    assert annots_with_opt_attributes != \
        expected_output_without_opt_attributes
    assert annots_without_opt_attributes != \
        expected_output_with_opt_attributes
def test_extract_annotations_overlapping_in_same_file():
    """Annotations that share a start offset are all kept under one key."""
    ingest_file = 'tests/data/offset_matching/the_doctors_age_overlapping.xmi'
    namespaces = {'cas': "http:///uima/cas.ecore",
                  'custom': "http:///webanno/custom.ecore"}
    document_data = dict(tag_xpath='./cas:Sofa',
                         content_attribute='sofaString')
    # The three PHI patterns differ only in the annotation type name.
    patterns = []
    for phi_type in ('Age', 'DateTime', 'Number'):
        patterns.append({'type': phi_type,
                         'xpath': './custom:PHI[@Time="' + phi_type + '"]',
                         'display_name': phi_type,
                         'short_name': phi_type,
                         'long_name': phi_type,
                         'optional_attributes': [],
                         'begin_attr': 'begin',
                         'end_attr': 'end'})
    offset_mapping, annots = text_extraction.extract_annotations(
        ingest_file,
        namespaces=namespaces,
        document_data=document_data,
        patterns=patterns,
        skip_chars=None,
        out_file=None)
    expected_annots = {
        '24': [{'type': 'Age', 'end_pos': '27',
                'raw_text': None, 'begin_pos': '24'},
               {'type': 'Number', 'end_pos': '27',
                'raw_text': None, 'begin_pos': '24'}],
        '41': [{'type': 'DateTime', 'end_pos': '59',
                'raw_text': None, 'begin_pos': '41'},
               {'type': 'DateTime', 'end_pos': '54',
                'raw_text': None, 'begin_pos': '41'}],
    }
    assert annots == expected_annots
#############################################
## Test writing to disk
#############################################
def test_writing_dictionary_for_datetime_from_0005_gs():
    """The JSON written by extract_annotations matches the reference file."""
    ingest_file = 'tests/data/i2b2_2016_track-1_reference/0005_gs.xml'
    reference_file = 'tests/data/i2b2_2016_track-1_reference_out/0005_gs.xml'
    config_file = 'config/i2b2_2016_track-1.conf'
    try:
        tmp_descriptor, tmp_file = tempfile.mkstemp()
        # Only the path is needed; release the raw descriptor immediately.
        os.close(tmp_descriptor)
        namespaces, document_data, patterns = \
            args_and_configs.process_config(config_file=config_file,
                                            score_key='Short Name',
                                            score_values=['.*'])
        text_extraction.extract_annotations(ingest_file,
                                            namespaces=namespaces,
                                            document_data=document_data,
                                            patterns=patterns,
                                            skip_chars=r'[\s]',
                                            out_file=tmp_file)
        with open(reference_file, 'r') as rf:
            reloaded_reference = json.load(rf)
        with open(tmp_file, 'r') as tf:
            reloaded_test = json.load(tf)
        for section in ('annotations', 'offset_mapping', 'raw_content'):
            assert reloaded_reference[section] == reloaded_test[section]
    finally:
        os.remove(tmp_file)
## TODO - add tests for ignore_whitespace == True | False
def test_of_presaved_dictionary_for_complex_patterns():
    """A presaved annotation dump matches a fresh in-memory extraction."""
    ingest_file = 'tests/data/i2b2_2016_track-1_reference/0005_gs.xml'
    presaved_file = 'tests/data/i2b2_2016_track-1_reference_out/0005_gs.xml'
    config_file = 'config/i2b2_2016_track-1.conf'
    namespaces, document_data, patterns = args_and_configs.process_config(
        config_file=config_file,
        score_key='Short Name',
        score_values=['.*'])
    with open(presaved_file, 'r') as fp:
        reloaded_json = json.load(fp)
    offset_mapping, strict_starts = text_extraction.extract_annotations(
        ingest_file,
        namespaces=namespaces,
        document_data=document_data,
        patterns=patterns,
        skip_chars=r'[\s]',
        out_file=None)
    assert reloaded_json['annotations'] == strict_starts
def test_of_identity_read_write_of_dictionary_for_complex_patterns():
    """Annotations written to disk reload identical to the returned dict.

    Also checks that the NamedTemporaryFile exists inside the context
    manager and is gone once it exits.
    """
    ingest_file = 'tests/data/i2b2_2016_track-1_reference/0005_gs.xml'
    config_file = 'config/i2b2_2016_track-1.conf'
    namespaces, document_data, patterns = \
        args_and_configs.process_config(config_file=config_file,
                                        score_key='Short Name',
                                        score_values=['.*'])
    with tempfile.NamedTemporaryFile() as tmpfile_handle:
        assert os.path.exists(tmpfile_handle.name)
        offset_mapping, strict_starts = \
            text_extraction.extract_annotations(
                ingest_file,
                namespaces=namespaces,
                document_data=document_data,
                patterns=patterns,
                skip_chars=r'[\s]',
                out_file=tmpfile_handle.name)
        reloaded_json = json.load(tmpfile_handle)
        assert reloaded_json['annotations'] == strict_starts
        assert os.path.exists(tmpfile_handle.name)
    # PEP 8: truth-test instead of comparing to False with `==`.
    assert not os.path.exists(tmpfile_handle.name)
## TODO - add real delimited ingest file for testing
# def test_of_identity_read_write_of_dictionary_for_delimited_patterns():
# ingest_file = 'tests/data/i2b2_2016_track-1_reference/0005_gs.xml'
# config_file = 'config/plaintext_sentences.conf'
# namespaces , document_data , patterns = \
# args_and_configs.process_config( config_file = config_file ,
# score_key = 'Short Name' ,
# score_values = [ '.*' ] )
# with tempfile.NamedTemporaryFile() as tmpfile_handle:
# assert os.path.exists( tmpfile_handle.name )
# offset_mapping , strict_starts = \
# text_extraction.extract_annotations( ingest_file ,
# namespaces = namespaces ,
# document_data = document_data ,
# patterns = patterns ,
# skip_chars = r'[\s]' ,
# out_file = tmpfile_handle.name )
# reloaded_json = json.load( tmpfile_handle )
# assert reloaded_json[ 'annotations' ] == strict_starts
# assert os.path.exists( tmpfile_handle.name )
# assert os.path.exists( tmpfile_handle.name ) == False
def test_empty_contents_of_write_of_dictionary_for_brat_patterns():
    """A brat file with no matching annotations still writes raw_content.

    The annotation dict is empty but the document text is preserved in
    the JSON dump.
    """
    ingest_file = 'tests/data/brat_reference/ibm.ann'
    config_file = 'config/brat_problems_allergies_standoff.conf'
    namespaces, document_data, patterns = \
        args_and_configs.process_config(config_file=config_file,
                                        score_key='Short Name',
                                        score_values=['.*'])
    with tempfile.NamedTemporaryFile() as tmpfile_handle:
        assert os.path.exists(tmpfile_handle.name)
        offset_mapping, strict_starts = \
            text_extraction.extract_annotations(
                ingest_file,
                namespaces=namespaces,
                document_data=document_data,
                patterns=patterns,
                skip_chars=r'[\s]',
                out_file=tmpfile_handle.name)
        assert strict_starts == {}
        assert os.path.exists(tmpfile_handle.name)
        with open(tmpfile_handle.name, 'r') as rf:
            reloaded_out_file = json.load(rf)
        assert reloaded_out_file["annotations"] == {}
        assert reloaded_out_file["raw_content"] == \
            "International Business Machines Corporation: IBM is Big Blue\n"
    # PEP 8: truth-test instead of comparing to False with `==`.
    assert not os.path.exists(tmpfile_handle.name)
def test_contents_of_write_of_dictionary_for_brat_patterns():
    """brat text-bound annotations and their attributes round-trip to JSON."""
    ingest_file = 'tests/data/brat_reference/problems_and_allergens.ann'
    config_file = 'config/brat_problems_allergies_standoff.conf'
    namespaces, document_data, patterns = \
        args_and_configs.process_config(config_file=config_file,
                                        score_key='Short Name',
                                        score_values=['.*'])
    with tempfile.NamedTemporaryFile() as tmpfile_handle:
        assert os.path.exists(tmpfile_handle.name)
        offset_mapping, strict_starts = \
            text_extraction.extract_annotations(
                ingest_file,
                namespaces=namespaces,
                document_data=document_data,
                patterns=patterns,
                skip_chars=r'[\s]',
                out_file=tmpfile_handle.name)
        reloaded_json = json.load(tmpfile_handle)
        assert reloaded_json['annotations'] == strict_starts
        ## T34   Problem 474 493 shortness of breath
        ## A1    Negated T34
        annot = strict_starts['474'][0]
        assert annot['begin_pos'] == '474'
        assert annot['end_pos'] == '493'
        assert annot['raw_text'] == 'shortness of breath'
        assert annot['Historical'] == 'false'
        assert annot['Negated'] == 'true'
        assert os.path.exists(tmpfile_handle.name)
    # PEP 8: truth-test instead of comparing to False with `==`.
    assert not os.path.exists(tmpfile_handle.name)
def test_brat_text_bound_annotation_simple():
    """A basic brat T-line parses into index, type, offsets and text."""
    line = 'T1	Organization 0 43	International Business Machines Corporation'
    new_entry = text_extraction.extract_brat_text_bound_annotation(
        'test.ann',
        line,
        offset_mapping={},
        tag_name='Organization',
        line_type='Organization',
        optional_attributes=[])
    expected = {'match_index': 'T1',
                'type': 'Organization',
                'begin_pos': '0',
                'end_pos': '43',
                'raw_text': 'International Business Machines Corporation'}
    for key, value in expected.items():
        assert new_entry[key] == value
def test_brat_text_bound_annotation_attributes_default_to_false():
    """Requested optional attributes default to 'false' when absent."""
    line = 'T1	Organization 0 43	International Business Machines Corporation'
    new_entry = text_extraction.extract_brat_text_bound_annotation(
        'test.ann',
        line,
        offset_mapping={},
        tag_name='Organization',
        line_type='Organization',
        optional_attributes=['Negated', 'Historical'])
    for attribute in ('Negated', 'Historical'):
        assert new_entry[attribute] == 'false'
def test_brat_text_bound_annotation_offset_mapping_works():
    """begin/end positions are remapped through the offset mapping."""
    line = 'T1	Organization 0 43	International Business Machines Corporation'
    new_entry = text_extraction.extract_brat_text_bound_annotation(
        'test.ann',
        line,
        offset_mapping={"0": "3", "43": "42"},
        tag_name='Organization',
        line_type='Organization',
        optional_attributes=[])
    assert new_entry['begin_pos'] == '0'
    assert new_entry['begin_pos_mapped'] == '3'
    assert new_entry['end_pos'] == '43'
    assert new_entry['end_pos_mapped'] == '42'
def test_brat_text_bound_annotation_skip_other_tags():
    """A T-line whose type does not match the requested tag is skipped."""
    line = 'T1	Organization 0 43	International Business Machines Corporation'
    new_entry = text_extraction.extract_brat_text_bound_annotation(
        'test.ann',
        line,
        offset_mapping={},
        tag_name='Person',
        line_type='Person',
        optional_attributes=[])
    # PEP 8: compare to None with `is`, not `==`.
    assert new_entry is None
def test_brat_text_bound_annotation_two_span_discontinuous():
    """A two-span discontinuous T-line keeps the outermost offsets."""
    ## North and South America
    ## T1	Location 0 5;16 23	North America
    ## T2	Location 10 23	South America
    line = 'T1	Location 0 5;16 23	North America'
    new_entry = text_extraction.extract_brat_text_bound_annotation(
        'test.ann',
        line,
        offset_mapping={"0": "3", "23": "42"},
        tag_name='Location',
        line_type='Location',
        optional_attributes=[])
    assert new_entry['begin_pos'] == '0'
    assert new_entry['begin_pos_mapped'] == '3'
    assert new_entry['end_pos'] == '23'
    assert new_entry['end_pos_mapped'] == '42'
def test_brat_text_bound_annotation_three_span_discontinuous():
    """A three-span discontinuous T-line keeps the outermost offsets."""
    ## North and South America
    ## T1	Location 0 5;16 23	North America
    ## T2	Location 10 23	South America
    line = 'T1	Location 0 5;8 12;16 23	North America'
    new_entry = text_extraction.extract_brat_text_bound_annotation(
        'test.ann',
        line,
        offset_mapping={"0": "3", "23": "42"},
        tag_name='Location',
        line_type='Location',
        optional_attributes=[])
    assert new_entry['begin_pos'] == '0'
    assert new_entry['begin_pos_mapped'] == '3'
    assert new_entry['end_pos'] == '23'
    assert new_entry['end_pos_mapped'] == '42'
def test_brat_relation_binary():
    """Binary relation (R) lines are currently not extracted."""
    ## T3	Organization 33 41	Ericsson
    ## T4	Country 75 81	Sweden
    ## R1	Origin Arg1:T3 Arg2:T4
    line = 'R1	Origin Arg1:T3 Arg2:T4'
    new_entry = text_extraction.extract_brat_relation(
        'test.ann',
        line,
        tag_name='',
        optional_attributes=[])
    # PEP 8: compare to None with `is`, not `==`.
    assert new_entry is None
def test_brat_relation_equivalence():
    """Equivalence (*) lines are currently not extracted."""
    ## T1	Organization 0 43	International Business Machines Corporation
    ## T2	Organization 45 48	IBM
    ## T3	Organization 52 60	Big Blue
    ## *	Equiv T1 T2 T3
    line = '*	Equiv T1 T2 T3'
    new_entry = text_extraction.extract_brat_equivalence(
        'test.ann',
        line,
        optional_attributes=[])
    # PEP 8: compare to None with `is`, not `==`.
    assert new_entry is None
def test_brat_event():
    """Event (E) lines are currently not extracted."""
    ## T1	Organization 0 4	Sony
    ## T2	MERGE-ORG 14 27	joint venture
    ## T3	Organization 33 41	Ericsson
    ## E1	MERGE-ORG:T2 Org1:T1 Org2:T3
    line = 'E1	MERGE-ORG:T2 Org1:T1 Org2:T3'
    new_entry = text_extraction.extract_brat_event(
        'test.ann',
        line,
        tag_name='',
        optional_attributes=[])
    # PEP 8: compare to None with `is`, not `==`.
    assert new_entry is None
def test_brat_skip_non_optional_attributes():
    """An A-line whose attribute was not requested yields a None name."""
    ## T1	Organization 0 4	Sony
    ## T2	MERGE-ORG 14 27	joint venture
    ## T3	Organization 33 41	Ericsson
    ## E1	MERGE-ORG:T2 Org1:T1 Org2:T3
    ## A1	Negation E1
    line = 'A1	Negation E1'
    new_attribute_value = text_extraction.extract_brat_attribute(
        'test.ann',
        line,
        optional_attributes=['Negated', 'Historical'])
    assert new_attribute_value == ['E1', 'Negation', None, 'true']
def test_brat_attribute_binary():
    """A requested binary attribute (A-line) resolves to 'true'."""
    ## T1	Organization 0 4	Sony
    ## T2	MERGE-ORG 14 27	joint venture
    ## T3	Organization 33 41	Ericsson
    ## E1	MERGE-ORG:T2 Org1:T1 Org2:T3
    ## A1	Negation E1
    line = 'A1	Negation E1'
    new_attribute_value = text_extraction.extract_brat_attribute(
        'test.ann',
        line,
        optional_attributes=['Negation'])
    assert new_attribute_value == ['E1', 'Negation', 'Negation', 'true']
def test_brat_attribute_binary_m_prefix():
    """M-prefixed attribute lines behave the same as A-prefixed ones."""
    ## T1	Organization 0 4	Sony
    ## T2	MERGE-ORG 14 27	joint venture
    ## T3	Organization 33 41	Ericsson
    ## E1	MERGE-ORG:T2 Org1:T1 Org2:T3
    ## M1	Negation E1
    line = 'M1	Negation E1'
    new_attribute_value = text_extraction.extract_brat_attribute(
        'test.ann',
        line,
        optional_attributes=['Negation'])
    assert new_attribute_value == ['E1', 'Negation', 'Negation', 'true']
def test_brat_attribute_multivalue_string():
    """Multi-valued attribute lines are currently not extracted."""
    ## T1	Organization 0 4	Sony
    ## T2	MERGE-ORG 14 27	joint venture
    ## T3	Organization 33 41	Ericsson
    ## E1	MERGE-ORG:T2 Org1:T1 Org2:T3
    ## A2	Confidence E2 L1
    line = 'A2	Confidence E2 L1'
    new_attribute_value = text_extraction.extract_brat_attribute(
        'test.ann',
        line,
        optional_attributes=['Confidence'])
    # PEP 8: compare to None with `is`, not `==`.
    assert new_attribute_value is None
def test_brat_normalization_ignore_unselected_reference():
    """N-lines for engines outside normalization_engines are skipped."""
    ## N1	Reference T1 Wikipedia:534366	Barack Obama
    line = 'N1	Reference T1 Wikipedia:534366	Barack Obama'
    new_entry = text_extraction.extract_brat_normalization(
        'test.ann',
        line,
        normalization_engines=['Britanica'])
    # PEP 8: compare to None with `is`, not `==`.
    assert new_entry is None
def test_brat_normalization_simple_lookup():
    """An N-line for a selected engine parses into id, engine, key, text."""
    ## N1	Reference T1 Wikipedia:534366	Barack Obama
    line = 'N1	Reference T1 Wikipedia:534366	Barack Obama'
    new_entry = text_extraction.extract_brat_normalization(
        'test.ann',
        line,
        normalization_engines=['Wikipedia'])
    assert new_entry == ['T1', 'Wikipedia', '534366', 'Barack Obama']
#############################################
## Test extracting document contents
#############################################
def test_empty_extraction_of_doc_content_from_0016_gs():
    """A non-matching cdata_xpath yields an empty offset mapping."""
    ingest_file = 'tests/data/i2b2_2016_track-1_reference/0016_gs.xml'
    ## Look for a path that doesn't exist so that we get an empty return
    test_dd = dict(cdata_xpath='/dev/null')
    raw_content, offset_mapping = text_extraction.extract_chars(
        ingest_file,
        namespaces={},
        document_data=test_dd,
        skip_chars=r'[\s]')
    assert offset_mapping == {}
def test_extracting_doc_content_from_0016_gs():
    """Whitespace maps to None; other characters get sequential offsets."""
    ingest_file = 'tests/data/i2b2_2016_track-1_reference/0016_gs.xml'
    test_dd = dict(cdata_xpath='./TEXT')
    raw_content, offset_mapping = text_extraction.extract_chars(
        ingest_file,
        namespaces={},
        document_data=test_dd,
        skip_chars=r'[\s]')
    expected_output = {'0': None, '1': None, '2': None,
                       '3': '0', '4': '1', '5': '2', '6': None}
    for index, mapped in expected_output.items():
        assert offset_mapping[index] == mapped
def test_extracting_doc_content_from_0016_gs_skip_z_char():
    """Adding 'z' to skip_chars also maps 'z' characters to None."""
    ingest_file = 'tests/data/i2b2_2016_track-1_reference/0016_gs.xml'
    test_dd = dict(cdata_xpath='./TEXT')
    raw_content, offset_mapping = text_extraction.extract_chars(
        ingest_file,
        namespaces={},
        document_data=test_dd,
        skip_chars=r'[\sz]')
    expected_output = {'0': None, '1': None, '2': None,
                       '3': None, '4': None, '5': '0', '6': None}
    for index, mapped in expected_output.items():
        assert offset_mapping[index] == mapped
def test_extracting_doc_content_from_0016_gs_skip_zpipe_char():
ingest_file = 'tests/data/i2b2_2016_track-1_reference/0016_gs.xml'
test_dd = dict( cdata_xpath = './TEXT' )
raw_content , offset_mapping = \
text_extraction.extract_chars( ingest_file ,
namespaces = {} ,
document_data = test_dd ,
skip_chars = '[z|]' )
expected_output = { '0': '0' ,
'1': '1' ,
'2': '2' ,
'3': None, '4': None, '5': None, '6': '3' }
for index in [ "0" , "1" , "2" , "3" , "4" , "5" , "6" ]:
assert offset_mapping[ index ] == expected_output[ index ]
def test_extracting_doc_content_from_995723_sentences_xmi():
ingest_file = 'tests/data/sentences/995723.sentences.xmi'
test_dd = dict( tag_xpath = './cas:Sofa' ,
content_attribute = 'sofaString' )
raw_content , offset_mapping = \
text_extraction.extract_chars( ingest_file ,
namespaces = { 'cas' :
"http:///uima/cas.ecore" } ,
document_data = test_dd ,
skip_chars = r'[\s]' )
expected_output = { '0': '0' , '1': '1' , '2': '2' , '3': '3' , '4': '4' ,
'5': '5' , '6': '6' , '7': '7' }
assert offset_mapping == expected_output
def test_offset_mapping_matches_pos_mapped_automatically():
ingest_file = 'tests/data/i2b2_2016_track-1_reference/0005_gs.xml'
document_data = dict( cdata_xpath = './TEXT' )
raw_content , offset_mapping = \
text_extraction.extract_chars( ingest_file ,
namespaces = {} ,
document_data = document_data ,
skip_chars = r'[\s]' )
strict_starts = \
text_extraction.extract_annotations_xml( ingest_file ,
offset_mapping = offset_mapping ,
annotation_path = \
'./TAGS/DATE' ,
tag_name = 'DateTime' ,
begin_attribute = 'start' ,
end_attribute = 'end' )
for start_key in strict_starts:
begin_pos = strict_starts[ start_key ][ 0 ][ 'begin_pos' ]
begin_pos_mapped = strict_starts[ start_key ][ 0 ][ 'begin_pos_mapped' ]
end_pos = strict_starts[ start_key ][ 0 ][ 'end_pos' ]
end_pos_mapped = strict_starts[ start_key ][ 0 ][ 'end_pos_mapped' ]
## dictionary key is set to begin_pos
assert start_key == begin_pos
## mapping works for begin position
assert begin_pos != begin_pos_mapped
while( offset_mapping[ begin_pos ] == None ):
begin_pos = str( int( begin_pos ) + 1 )
assert begin_pos_mapped == offset_mapping[ begin_pos ]
## mapping works for end position
assert end_pos != end_pos_mapped
while( offset_mapping[ end_pos ] == None ):
end_pos = str( int( end_pos ) - 1 )
assert end_pos_mapped == offset_mapping[ end_pos ]
def test_offset_mapping_matches_pos_mapped_manually():
ingest_file = 'tests/data/i2b2_2016_track-1_reference/0005_gs.xml'
document_data = dict( cdata_xpath = './TEXT' )
raw_content , offset_mapping = \
text_extraction.extract_chars( ingest_file ,
namespaces = {} ,
document_data = document_data ,
skip_chars = r'[\s]' )
strict_starts = \
text_extraction.extract_annotations_xml( ingest_file ,
offset_mapping = offset_mapping ,
annotation_path = \
'./TAGS/DATE' ,
tag_name = 'DateTime' ,
begin_attribute = 'start' ,
end_attribute = 'end' )
##
assert strict_starts[ '87' ][ 0 ][ 'begin_pos' ] == '87'
assert strict_starts[ '87' ][ 0 ][ 'begin_pos_mapped' ] == \
offset_mapping[ '87' ]
assert strict_starts[ '87' ][ 0 ][ 'end_pos' ] == '97'
assert strict_starts[ '87' ][ 0 ][ 'end_pos_mapped' ] == \
offset_mapping[ '97' ]
##
assert strict_starts[ '2404' ][ 0 ][ 'begin_pos' ] == '2404'
assert strict_starts[ '2404' ][ 0 ][ 'begin_pos_mapped' ] == \
offset_mapping[ '2404' ]
assert strict_starts[ '2404' ][ 0 ][ 'end_pos' ] == '2410'
assert strict_starts[ '2404' ][ 0 ][ 'end_pos_mapped' ] == \
offset_mapping[ '2409' ]
def test_brat_standoff_extraction():
ingest_file = 'tests/data/brat_reference/ibm.ann'
document_data = dict( format = '.ann .txt' )
raw_content , offset_mapping = \
text_extraction.extract_chars( ingest_file ,
namespaces = {} ,
document_data = document_data ,
skip_chars = r'[\s]' )
strict_starts = \
text_extraction.extract_annotations_brat_standoff( ingest_file ,
offset_mapping = offset_mapping ,
type_prefix = 'T' ,
tag_name = 'Organization' ,
line_type = 'Organization' )
##
assert strict_starts[ '0' ][ 0 ][ 'begin_pos' ] == '0'
assert strict_starts[ '0' ][ 0 ][ 'end_pos' ] == '43'
assert strict_starts[ '0' ][ 0 ][ 'raw_text' ] == 'International Business Machines Corporation'
##
assert strict_starts[ '45' ][ 0 ][ 'begin_pos' ] == '45'
assert strict_starts[ '45' ][ 0 ][ 'end_pos' ] == '48'
##
assert strict_starts[ '52' ][ 0 ][ 'raw_text' ] == 'Big Blue'
def test_brat_standoff_extraction_with_attributes():
ingest_file = 'tests/data/brat_reference/problems_and_allergens.ann'
document_data = dict( format = '.ann .txt' )
raw_content , offset_mapping = \
text_extraction.extract_chars( ingest_file ,
namespaces = {} ,
document_data = document_data ,
skip_chars = r'[\s]' )
strict_starts = \
text_extraction.extract_annotations_brat_standoff( ingest_file ,
offset_mapping = offset_mapping ,
type_prefix = 'T' ,
tag_name = 'Problem' ,
line_type = 'Problem' ,
optional_attributes = [ 'Conditional' ,
'Generic' ,
'Historical' ,
'Negated' ,
'NotPatient' ,
'Uncertain' ] )
##
assert strict_starts[ '474' ][ 0 ][ 'begin_pos' ] == '474'
assert strict_starts[ '474' ][ 0 ][ 'end_pos' ] == '493'
assert strict_starts[ '474' ][ 0 ][ 'raw_text' ] == 'shortness of breath'
| 51.979809
| 117
| 0.453274
| 4,062
| 48,913
| 5.111521
| 0.082472
| 0.039879
| 0.050571
| 0.040071
| 0.853538
| 0.813466
| 0.780427
| 0.741993
| 0.723499
| 0.685835
| 0
| 0.034411
| 0.447468
| 48,913
| 940
| 118
| 52.035106
| 0.733849
| 0.05935
| 0
| 0.720361
| 0
| 0
| 0.138585
| 0.037442
| 0
| 0
| 0
| 0.001064
| 0.126289
| 1
| 0.054124
| false
| 0
| 0.006443
| 0
| 0.060567
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
437aceee1cd917c5eca8912e5af2dd0f3fbf776d
| 184
|
py
|
Python
|
plugins/twitter/komand_twitter/triggers/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/twitter/komand_twitter/triggers/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/twitter/komand_twitter/triggers/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .mentions.trigger import Mentions
from .messages.trigger import Messages
from .tweets.trigger import Tweets
from .user.trigger import User
| 30.666667
| 39
| 0.809783
| 27
| 184
| 5.518519
| 0.518519
| 0.348993
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13587
| 184
| 5
| 40
| 36.8
| 0.937107
| 0.201087
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
78e90741e36a3c1a3a570950a5aeb64a96dc54c8
| 2,174
|
py
|
Python
|
maze/management/commands/populate.py
|
TerryHowe/pymaze
|
139f15fb8e932c1cf1e63f5c5aee7895691e993f
|
[
"MIT"
] | 1
|
2020-12-14T04:01:08.000Z
|
2020-12-14T04:01:08.000Z
|
maze/management/commands/populate.py
|
TerryHowe/pymaze
|
139f15fb8e932c1cf1e63f5c5aee7895691e993f
|
[
"MIT"
] | null | null | null |
maze/management/commands/populate.py
|
TerryHowe/pymaze
|
139f15fb8e932c1cf1e63f5c5aee7895691e993f
|
[
"MIT"
] | null | null | null |
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Populate the database'
def handle(self, *args, **options):
from maze.models import Passage
one = Passage(room_x=0,room_y=0,direction='N')
two = Passage(room_x=0,room_y=1,direction='S')
one.save()
two.save()
one.destination = two
two.destination = one
one.save()
two.save()
one = Passage(room_x=0,room_y=0,direction='E')
two = Passage(room_x=1,room_y=0,direction='W')
one.save()
two.save()
one.destination = two
two.destination = one
one.save()
two.save()
one = Passage(room_x=1,room_y=0,direction='E')
two = Passage(room_x=2,room_y=0,direction='W')
one.save()
two.save()
one.destination = two
two.destination = one
one.save()
two.save()
one = Passage(room_x=2,room_y=0,direction='N')
two = Passage(room_x=2,room_y=1,direction='S')
one.save()
two.save()
one.destination = two
two.destination = one
one.save()
two.save()
one = Passage(room_x=2,room_y=1,direction='N')
two = Passage(room_x=2,room_y=2,direction='S')
one.save()
two.save()
one.destination = two
two.destination = one
one.save()
two.save()
one = Passage(room_x=2,room_y=2,direction='W')
two = Passage(room_x=1,room_y=2,direction='E')
one.save()
two.save()
one.destination = two
two.destination = one
one.save()
two.save()
one = Passage(room_x=1,room_y=2,direction='W')
two = Passage(room_x=0,room_y=2,direction='E')
one.save()
two.save()
one.destination = two
two.destination = one
one.save()
two.save()
one = Passage(room_x=0,room_y=2,direction='S')
two = Passage(room_x=0,room_y=1,direction='N')
one.save()
two.save()
one.destination = two
two.destination = one
one.save()
two.save()
| 26.839506
| 54
| 0.543698
| 296
| 2,174
| 3.885135
| 0.121622
| 0.153043
| 0.166957
| 0.194783
| 0.876522
| 0.874783
| 0.874783
| 0.874783
| 0.86087
| 0.651304
| 0
| 0.021505
| 0.315547
| 2,174
| 80
| 55
| 27.175
| 0.751344
| 0
| 0
| 0.695652
| 0
| 0
| 0.017019
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014493
| false
| 0.246377
| 0.028986
| 0
| 0.072464
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
60803f514279c500398dd43ec81b0ad5d57430f3
| 10,931
|
py
|
Python
|
st2client/tests/unit/test_commands.py
|
UbuntuEvangelist/st2
|
36af04f2caa03b396fb8ab00fd6d700e827fda8d
|
[
"Apache-2.0"
] | 1
|
2020-11-21T10:11:25.000Z
|
2020-11-21T10:11:25.000Z
|
st2client/tests/unit/test_commands.py
|
FairwindsOps/st2
|
2b76ca740c4af0d6b2c1d1ba5534ce4133fd16fa
|
[
"Apache-2.0"
] | 1
|
2015-06-08T15:27:11.000Z
|
2015-06-08T15:27:11.000Z
|
st2client/tests/unit/test_commands.py
|
FairwindsOps/st2
|
2b76ca740c4af0d6b2c1d1ba5534ce4133fd16fa
|
[
"Apache-2.0"
] | 13
|
2017-01-12T11:07:20.000Z
|
2019-04-19T09:55:49.000Z
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import mock
import json
import logging
import argparse
import tempfile
import unittest2
from tests import base
from st2client import models
from st2client.utils import httpclient
from st2client.commands import resource
LOG = logging.getLogger(__name__)
class TestResourceCommand(unittest2.TestCase):
def __init__(self, *args, **kwargs):
super(TestResourceCommand, self).__init__(*args, **kwargs)
self.parser = argparse.ArgumentParser()
self.subparsers = self.parser.add_subparsers()
self.branch = resource.ResourceBranch(
base.FakeResource, 'Test Command', base.FakeApp(), self.subparsers)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(base.RESOURCES), 200, 'OK')))
def test_command_list(self):
args = self.parser.parse_args(['fakeresource', 'list'])
self.assertEqual(args.func, self.branch.commands['list'].run_and_print)
instances = self.branch.commands['list'].run(args)
actual = [instance.serialize() for instance in instances]
expected = json.loads(json.dumps(base.RESOURCES))
self.assertListEqual(actual, expected)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse('', 500, 'INTERNAL SERVER ERROR')))
def test_command_list_failed(self):
args = self.parser.parse_args(['fakeresource', 'list'])
self.assertRaises(Exception, self.branch.commands['list'].run, args)
@mock.patch.object(
models.ResourceManager, 'get_by_name',
mock.MagicMock(return_value=None))
@mock.patch.object(
models.ResourceManager, 'get_by_id',
mock.MagicMock(return_value=base.FakeResource(**base.RESOURCES[0])))
def test_command_get_by_id(self):
args = self.parser.parse_args(['fakeresource', 'get', '123'])
self.assertEqual(args.func, self.branch.commands['get'].run_and_print)
instance = self.branch.commands['get'].run(args)
actual = instance.serialize()
expected = json.loads(json.dumps(base.RESOURCES[0]))
self.assertEqual(actual, expected)
@mock.patch.object(
models.ResourceManager, 'get_by_name',
mock.MagicMock(return_value=base.FakeResource(**base.RESOURCES[0])))
@mock.patch.object(
models.ResourceManager, 'get_by_id',
mock.MagicMock(return_value=None))
def test_command_get_by_name(self):
args = self.parser.parse_args(['fakeresource', 'get', 'abc'])
self.assertEqual(args.func, self.branch.commands['get'].run_and_print)
instance = self.branch.commands['get'].run(args)
actual = instance.serialize()
expected = json.loads(json.dumps(base.RESOURCES[0]))
self.assertEqual(actual, expected)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps([base.RESOURCES[0]]), 200, 'OK')))
def test_command_get(self):
args = self.parser.parse_args(['fakeresource', 'get', 'abc'])
self.assertEqual(args.func, self.branch.commands['get'].run_and_print)
instance = self.branch.commands['get'].run(args)
actual = instance.serialize()
expected = json.loads(json.dumps(base.RESOURCES[0]))
self.assertEqual(actual, expected)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse('', 404, 'NOT FOUND')))
def test_command_get_404(self):
args = self.parser.parse_args(['fakeresource', 'get', 'cba'])
self.assertEqual(args.func, self.branch.commands['get'].run_and_print)
self.assertRaises(resource.ResourceNotFoundError,
self.branch.commands['get'].run,
args)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse('', 500, 'INTERNAL SERVER ERROR')))
def test_command_get_failed(self):
args = self.parser.parse_args(['fakeresource', 'get', 'cba'])
self.assertRaises(Exception, self.branch.commands['get'].run, args)
@mock.patch.object(
httpclient.HTTPClient, 'post',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(base.RESOURCES[0]), 200, 'OK')))
def test_command_create(self):
instance = base.FakeResource(name='abc')
fd, path = tempfile.mkstemp(suffix='.json')
try:
with open(path, 'a') as f:
f.write(json.dumps(instance.serialize(), indent=4))
args = self.parser.parse_args(['fakeresource', 'create', path])
self.assertEqual(args.func,
self.branch.commands['create'].run_and_print)
instance = self.branch.commands['create'].run(args)
actual = instance.serialize()
expected = json.loads(json.dumps(base.RESOURCES[0]))
self.assertEqual(actual, expected)
finally:
os.close(fd)
os.unlink(path)
@mock.patch.object(
httpclient.HTTPClient, 'post',
mock.MagicMock(return_value=base.FakeResponse('', 500, 'INTERNAL SERVER ERROR')))
def test_command_create_failed(self):
instance = base.FakeResource(name='abc')
fd, path = tempfile.mkstemp(suffix='.json')
try:
with open(path, 'a') as f:
f.write(json.dumps(instance.serialize(), indent=4))
args = self.parser.parse_args(['fakeresource', 'create', path])
self.assertRaises(Exception,
self.branch.commands['create'].run,
args)
finally:
os.close(fd)
os.unlink(path)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps([base.RESOURCES[0]]), 200, 'OK')))
@mock.patch.object(
httpclient.HTTPClient, 'put',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(base.RESOURCES[0]), 200, 'OK')))
def test_command_update(self):
instance = base.FakeResource(id='123', name='abc')
fd, path = tempfile.mkstemp(suffix='.json')
try:
with open(path, 'a') as f:
f.write(json.dumps(instance.serialize(), indent=4))
args = self.parser.parse_args(
['fakeresource', 'update', '123', path])
self.assertEqual(args.func,
self.branch.commands['update'].run_and_print)
instance = self.branch.commands['update'].run(args)
actual = instance.serialize()
expected = json.loads(json.dumps(base.RESOURCES[0]))
self.assertEqual(actual, expected)
finally:
os.close(fd)
os.unlink(path)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps([base.RESOURCES[0]]), 200, 'OK')))
@mock.patch.object(
httpclient.HTTPClient, 'put',
mock.MagicMock(return_value=base.FakeResponse('', 500, 'INTERNAL SERVER ERROR')))
def test_command_update_failed(self):
instance = base.FakeResource(id='123', name='abc')
fd, path = tempfile.mkstemp(suffix='.json')
try:
with open(path, 'a') as f:
f.write(json.dumps(instance.serialize(), indent=4))
args = self.parser.parse_args(
['fakeresource', 'update', '123', path])
self.assertRaises(Exception,
self.branch.commands['update'].run,
args)
finally:
os.close(fd)
os.unlink(path)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps([base.RESOURCES[0]]), 200, 'OK')))
def test_command_update_id_mismatch(self):
instance = base.FakeResource(id='789', name='abc')
fd, path = tempfile.mkstemp(suffix='.json')
try:
with open(path, 'a') as f:
f.write(json.dumps(instance.serialize(), indent=4))
args = self.parser.parse_args(
['fakeresource', 'update', '123', path])
self.assertRaises(Exception,
self.branch.commands['update'].run,
args)
finally:
os.close(fd)
os.unlink(path)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps([base.RESOURCES[0]]), 200, 'OK')))
@mock.patch.object(
httpclient.HTTPClient, 'delete',
mock.MagicMock(return_value=base.FakeResponse('', 204, 'NO CONTENT')))
def test_command_delete(self):
args = self.parser.parse_args(['fakeresource', 'delete', 'abc'])
self.assertEqual(args.func,
self.branch.commands['delete'].run_and_print)
self.branch.commands['delete'].run(args)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse('', 404, 'NOT FOUND')))
def test_command_delete_404(self):
args = self.parser.parse_args(['fakeresource', 'delete', 'cba'])
self.assertEqual(args.func,
self.branch.commands['delete'].run_and_print)
self.assertRaises(resource.ResourceNotFoundError,
self.branch.commands['delete'].run,
args)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps([base.RESOURCES[0]]), 200, 'OK')))
@mock.patch.object(
httpclient.HTTPClient, 'delete',
mock.MagicMock(return_value=base.FakeResponse('', 500, 'INTERNAL SERVER ERROR')))
def test_command_delete_failed(self):
args = self.parser.parse_args(['fakeresource', 'delete', 'cba'])
self.assertRaises(Exception, self.branch.commands['delete'].run, args)
| 43.899598
| 99
| 0.630134
| 1,241
| 10,931
| 5.455278
| 0.141015
| 0.036928
| 0.063811
| 0.074446
| 0.816987
| 0.794978
| 0.77873
| 0.74195
| 0.71418
| 0.691433
| 0
| 0.012801
| 0.235294
| 10,931
| 248
| 100
| 44.076613
| 0.797105
| 0.068612
| 0
| 0.739336
| 0
| 0
| 0.069939
| 0
| 0
| 0
| 0
| 0
| 0.109005
| 1
| 0.075829
| false
| 0
| 0.052133
| 0
| 0.132701
| 0.042654
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71789ebe12ec47f1d6b926b7de724219d21654b5
| 140
|
py
|
Python
|
keras_cbc/visualizations/__init__.py
|
cbc-authors/cbc_networks
|
b65a7967fa1f6ac2cec23503a9f01fa7bf04ae03
|
[
"BSD-3-Clause"
] | 15
|
2019-10-23T15:47:39.000Z
|
2022-03-16T20:56:21.000Z
|
keras_cbc/visualizations/__init__.py
|
cbc-authors/cbc_networks
|
b65a7967fa1f6ac2cec23503a9f01fa7bf04ae03
|
[
"BSD-3-Clause"
] | 1
|
2020-03-18T04:47:01.000Z
|
2020-03-24T10:07:36.000Z
|
keras_cbc/visualizations/__init__.py
|
saralajew/cbc_networks
|
b65a7967fa1f6ac2cec23503a9f01fa7bf04ae03
|
[
"BSD-3-Clause"
] | 4
|
2019-11-08T18:16:13.000Z
|
2020-04-03T15:09:39.000Z
|
from __future__ import absolute_import
from .basic_visualizations import *
from .input_dependent import *
from .input_independent import *
| 23.333333
| 38
| 0.835714
| 17
| 140
| 6.411765
| 0.529412
| 0.275229
| 0.275229
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121429
| 140
| 5
| 39
| 28
| 0.886179
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
71d20fe204f13d9754a79863ad88cd01ee83b5ab
| 7,842
|
py
|
Python
|
mock_ticket_api/tests/snapshots/snap_test_ticket_api.py
|
City-of-Helsinki/maritime-maas
|
8a6013d9bad52b262aa6e9fca16c4fa8d58d1255
|
[
"MIT"
] | null | null | null |
mock_ticket_api/tests/snapshots/snap_test_ticket_api.py
|
City-of-Helsinki/maritime-maas
|
8a6013d9bad52b262aa6e9fca16c4fa8d58d1255
|
[
"MIT"
] | 34
|
2021-03-05T15:07:17.000Z
|
2022-02-23T19:05:39.000Z
|
mock_ticket_api/tests/snapshots/snap_test_ticket_api.py
|
City-of-Helsinki/maritime-maas
|
8a6013d9bad52b262aa6e9fca16c4fa8d58d1255
|
[
"MIT"
] | 1
|
2022-02-24T13:57:52.000Z
|
2022-02-24T13:57:52.000Z
|
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
snapshots["test_availability 1"] = [
{"available": 4, "date": "2021-04-20", "total": 44, "trip_id": "1"},
{"available": 2, "date": "2021-04-20", "trip_id": "4"},
]
snapshots["test_ticket_confirmation 1"] = {
"id": "71ck37_1d",
"status": "CONFIRMED",
"tickets": [
{
"agency": {
"logo_url": "http://www.agency.com/logo.png",
"name": "MaaS Line",
},
"amount": 12,
"currency": "EUR",
"customer_type": "Aikuinen",
"departures": [
{
"depart_at": "2021-04-20T01:00:00Z",
"from": "Kauppatori",
"to": "Vallisaari",
}
],
"description": "This is the description of the ticket",
"id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
"instructions": "These are the instructions of the ticket",
"locale": "fi",
"name": "Day in Vallisaari",
"qr_code": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAMgAAADICAYAAACtWK6eAAAHAklEQVR4nO3YwXYrOQhF0fz/T1fP/drEIlBCrn3W8kyiLoiTQX4uAG/52R0AmAxBgACCAAEEAQIIAgQQBAggCBBAECCAIEAAQYAAggABBAECUoL8/Pwc8avK3013/mnznJY/fJvUpQHN3vmg3ZyykNPy3PGOBGkabMU8q/JPm+e0/OHbpC4NaPbOB+3mlIWclueOdyRI02Ar5lmVf9o8p+UP3yZ1aUCzdz5oN6cs5LQ8d7xjqSC7mJbnHd05d4k/bf4EGZ7nHQS5B4IMz/MOgtwDQYbneQdB7oEgw/O8gyD3QJDhed5BkHs4TpDVh6uqX3W+u69pC7zr/K59iCAIQcacJ8iQgRCEIJ9CEIKMOU+QIQMhCEE+hSAEGXOeIEMGMu273XPrFvP0fQh7S106fCDTvts9N4IQhCAESecJe0tdOnwg077bPTeCEIQgBEnnCXtLXTp8INO+2z03ghBk1OJ156yCIB/0lrp0+EAIUlv/9H0Ie0tdOnwgBKmtf/o+hL2lLh0+EILU1j99H8LeUpcOHwhBauufvg9hb6lLhw+EILX1T9+HsLfUpeaH25Wnu86uhT9pISsgSFMeghDkn1q7A1RAkNx3CfJBrd0BKiBI7rsE+aDW7gAVECT3XYJ8UGt3gAoIkvsuQT6oVRlg2q9qgM7nzk/7ZSCI823np/0yEMT5tvPTfhkI4nzb+Wm/DARxvu38tF8Ggjjfdn7aL8O+/8UNpHuRqvKctGCn88yu30AQgrzyzK7fQBCCvPLMrt9AEIK88syu30AQgrzyzK7fQBCCvDLy37xVearOV7FrDlXffVrO6yIIQQgSZ01dIkgKgpyV87oIQhCCxFlTlwiSgiBn5bwughCEIHHW1K3VjxQ12D2oaeJ05+yec/cfhNU8qVplqaKPECSEIAQhSABBCEKQAIIQhCABBCEIQQII8mWCVA2ECLWLPe38LgErIQhB2s4ThCBLEIQgBAkgCEEIEkAQghAkgCAPEeRtsWGNdw/wW/8gdPe1611StcpSJYIRhCAEIQhBCJILRhCCEIQgBCFILhhBCEKQGxZjl1CnL173oq6yS6gwU2kxghDkDxCEIAQJIAhBCBJAEIIQJIAgBCFIwNcIsmvhp4lwep5pdarqVwpFkMEL0J1nWp2q+gQ5dCGn5ZlWp6o+QQ5dyGl5ptWpqk+QQxdyWp5pdarqE+TQhZyWZ1qdqvpjBek+P+0hps2nu99TBCQIQQhCEIJUna/qlyCfXiIIQQgSXCIIQQgSXCIIQQgSXBo2wKr8u+rvXICT83fP4boIMqI+QQjSWqcq/676BCFIa52q/LvqE4QgrXWq8u+qTxCCtNapyr+rPkEeIsjq+W7Rqti1qFVM+8N1yrtfF0H+1C9BCFISjCB7IUgegvyhX4IQpCQYQfZCkDwE+UO/BCHIWrFhjXeLWZX/W/N0f5cgBDk6D0FeixEklf9b8xDktRhBUvm/NQ9BXosRJJX/W/MQ5LUYQVL5vzXPYwXZ9XBVdaoeYtdi37EYFUzbkwwEIUgb0/YkA0EI0sa0PclAEIK0MW1PMhCEIG1M25MMBCFIG9P2JMMtX5m2eN1UCbt6ftcfim4IMvyBViFILQQZ/kCrEKQWggx/oFUIUgtBhj/QKgSphSDDH2gVgtTyNYJ0L8ZqnWm/qvynz6F7npUQZMCDEoQgJedX60z7VeU/fQ4EIQhBCPI/xQhCkAHzrIQgAx6UIA8R5BR2LfwupuU/SSiCEOTP56vyEGQIBCHIx1nLuj4IghDk46xlXR8EQQjycdayrg+CIAT5OOtJDXYPZHfeu4WqylN1vvu7GQgyuK9uCPI7BBncVzcE+R2CDO6rG4L8DkEG99UNQX6HIIP76oYgv1MqyC66BenOuUv8qvNVeablvC6CtNRZrU8QgrRCkNq+ps
1hV87rIkhLndX6BCFIKwSp7WvaHHblvC6CtNRZrU+QhwvSvRi7HrT7/DShduXcNYfrIsit/a6eJ0htngwEubHf1fMEqc2TgSA39rt6niC1eTIQ5MZ+V88TpDZPBoLc2O/qeYLU5snwSEF2LdLp9U/vKwNBDnxoguTqZyDIgQ9NkFz9DAQ58KEJkqufgSAHPjRBcvUzEOTAhyZIrn4GgjTkr/ruNKb9Ier+7nURhCALEOTmwAQhyOTvXhdBCLIAQW4OTBCCTP7udRGEIAsQpClwN6c83K783X+gpvVVCUEIQpCoh9QlgmytQ5Dc+QwEIQhBoh5SlwiytQ5BcuczEIQgBIl6SF0qGnj3bzX/LqYt8K75THxHghCEIFGm1KUBy08QghCEIAQhCEEIEuepOp/KlLo0YPkJQpCxggBPgSBAAEGAAIIAAQQBAggCBBAECCAIEEAQIIAgQABBgACCAAEEAQL+A0h7mXOeq/H0AAAAAElFTkSuQmCC", # noqa: E501
"refresh_at": "2021-04-21T00:00:00Z",
"terms_of_use": "http://www.terms.and.conditions.fi",
"ticket_html": "<div>...</div>",
"ticket_type": "Päivälippu",
"valid_from": "2021-04-20T00:00:00Z",
"valid_to": "2021-04-21T00:00:00Z",
}
],
}
snapshots["test_ticket_details 1"] = {
"id": "71ck37_1d",
"status": "CONFIRMED",
"tickets": [
{
"agency": {
"logo_url": "http://www.agency.com/logo.png",
"name": "MaaS Line",
},
"amount": 12,
"currency": "EUR",
"customer_type": "Aikuinen",
"departures": [
{
"depart_at": "2021-04-20T01:00:00Z",
"from": "Kauppatori",
"to": "Vallisaari",
}
],
"description": "This is the description of the ticket",
"id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
"instructions": "These are the instructions of the ticket",
"locale": "fi",
"name": "Day in Vallisaari",
"qr_code": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAMgAAADICAYAAACtWK6eAAAHAklEQVR4nO3YwXYrOQhF0fz/T1fP/drEIlBCrn3W8kyiLoiTQX4uAG/52R0AmAxBgACCAAEEAQIIAgQQBAggCBBAECCAIEAAQYAAggABBAECUoL8/Pwc8avK3013/mnznJY/fJvUpQHN3vmg3ZyykNPy3PGOBGkabMU8q/JPm+e0/OHbpC4NaPbOB+3mlIWclueOdyRI02Ar5lmVf9o8p+UP3yZ1aUCzdz5oN6cs5LQ8d7xjqSC7mJbnHd05d4k/bf4EGZ7nHQS5B4IMz/MOgtwDQYbneQdB7oEgw/O8gyD3QJDhed5BkHs4TpDVh6uqX3W+u69pC7zr/K59iCAIQcacJ8iQgRCEIJ9CEIKMOU+QIQMhCEE+hSAEGXOeIEMGMu273XPrFvP0fQh7S106fCDTvts9N4IQhCAESecJe0tdOnwg077bPTeCEIQgBEnnCXtLXTp8INO+2z03ghBk1OJ156yCIB/0lrp0+EAIUlv/9H0Ie0tdOnwgBKmtf/o+hL2lLh0+EILU1j99H8LeUpcOHwhBauufvg9hb6lLhw+EILX1T9+HsLfUpeaH25Wnu86uhT9pISsgSFMeghDkn1q7A1RAkNx3CfJBrd0BKiBI7rsE+aDW7gAVECT3XYJ8UGt3gAoIkvsuQT6oVRlg2q9qgM7nzk/7ZSCI823np/0yEMT5tvPTfhkI4nzb+Wm/DARxvu38tF8Ggjjfdn7aL8O+/8UNpHuRqvKctGCn88yu30AQgrzyzK7fQBCCvPLMrt9AEIK88syu30AQgrzyzK7fQBCCvDLy37xVearOV7FrDlXffVrO6yIIQQgSZ01dIkgKgpyV87oIQhCCxFlTlwiSgiBn5bwughCEIHHW1K3VjxQ12D2oaeJ05+yec/cfhNU8qVplqaKPECSEIAQhSABBCEKQAIIQhCABBCEIQQII8mWCVA2ECLWLPe38LgErIQhB2s4ThCBLEIQgBAkgCEEIEkAQghAkgCAPEeRtsWGNdw/wW/8gdPe1611StcpSJYIRhCAEIQhBCJILRhCCEIQgBCFILhhBCEKQGxZjl1CnL173oq6yS6gwU2kxghDkDxCEIAQJIAhBCBJAEIIQJIAgBCFIwNcIsmvhp4lwep5pdarqVwpFkMEL0J1nWp2q+gQ5dCGn5ZlWp6o+QQ5dyGl5ptWpqk+QQxdyWp5pdarqE+TQhZyWZ1qdqvpjBek+P+0hps2nu99TBCQIQQhCEIJUna/qlyCfXiIIQQgSXCIIQQgSXCIIQQgSXBo2wKr8u+rvXICT83fP4boIMqI+QQjSWqcq/676BCFIa52q/LvqE4QgrXWq8u+qTxCCtNapyr+rPkEeIsjq+W7Rqti1qFVM+8N1yrtfF0H+1C9BCFISjCB7IUgegvyhX4IQpCQYQfZCkDwE+UO/BCHIWrFhjXeLWZX/W/N0f5cgBDk6D0FeixEklf9b8xDktRhBUvm/NQ9BXosRJJX/W/MQ5LUYQVL5vzXPYwXZ9XBVdaoeYtdi37EYFUzbkwwEIUgb0/YkA0EI0sa0PclAEIK0MW1PMhCEIG1M25MMBCFIG9P2JMMtX5m2eN1UCbt6ftcfim4IMvyBViFILQQZ/kCrEKQWggx/oFUIUgtBhj/QKgSphSDDH2gVgtTyNYJ0L8ZqnWm/qvynz6F7npUQZMCDEoQgJedX60z7VeU/fQ4EIQhBCPI/xQhCkAHzrIQgAx6UIA8R5BR2LfwupuU/SSiCEOTP56vyEGQIBCHIx1nLuj4IghDk46xlXR8EQQjycdayrg+CIAT5OOtJDXYPZHfeu4WqylN1vvu7GQgyuK9uCPI7BBncVzcE+R2CDO6rG4L8DkEG99UNQX6HIIP76oYgv1MqyC66BenOuUv8qvNVeablvC6CtNRZrU8QgrRCkNq+ps
1hV87rIkhLndX6BCFIKwSp7WvaHHblvC6CtNRZrU+QhwvSvRi7HrT7/DShduXcNYfrIsit/a6eJ0htngwEubHf1fMEqc2TgSA39rt6niC1eTIQ5MZ+V88TpDZPBoLc2O/qeYLU5snwSEF2LdLp9U/vKwNBDnxoguTqZyDIgQ9NkFz9DAQ58KEJkqufgSAHPjRBcvUzEOTAhyZIrn4GgjTkr/ruNKb9Ier+7nURhCALEOTmwAQhyOTvXhdBCLIAQW4OTBCCTP7udRGEIAsQpClwN6c83K783X+gpvVVCUEIQpCoh9QlgmytQ5Dc+QwEIQhBoh5SlwiytQ5BcuczEIQgBIl6SF0qGnj3bzX/LqYt8K75THxHghCEIFGm1KUBy08QghCEIAQhCEEIEuepOp/KlLo0YPkJQpCxggBPgSBAAEGAAIIAAQQBAggCBBAECCAIEEAQIIAgQABBgACCAAEEAQL+A0h7mXOeq/H0AAAAAElFTkSuQmCC", # noqa: E501
"refresh_at": "2021-04-21T00:00:00Z",
"terms_of_use": "http://www.terms.and.conditions.fi",
"ticket_html": "<div>...</div>",
"ticket_type": "Päivälippu",
"valid_from": "2021-04-20T00:00:00Z",
"valid_to": "2021-04-21T00:00:00Z",
}
],
}
snapshots["test_ticket_reservation 1"] = {
"id": "00000000-0000-0000-0000-000000000001",
"status": "RESERVED",
}
| 89.113636
| 2,530
| 0.785641
| 459
| 7,842
| 13.339869
| 0.411765
| 0.009799
| 0.005226
| 0.008493
| 0.949861
| 0.949861
| 0.949861
| 0.949861
| 0.949861
| 0.949861
| 0
| 0.132875
| 0.127646
| 7,842
| 87
| 2,531
| 90.137931
| 0.762169
| 0.010712
| 0
| 0.683544
| 0
| 0.025316
| 0.810525
| 0.662324
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025316
| 0
| 0.025316
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
e0b88b8343f0062876db19291557c64415602501
| 165
|
py
|
Python
|
object_detection2/modeling/proposal_generator/__init__.py
|
vghost2008/wml
|
d0c5a1da6c228e321ae59a563e9ac84aa66266ff
|
[
"MIT"
] | 6
|
2019-12-10T17:18:56.000Z
|
2022-03-01T01:00:35.000Z
|
object_detection2/modeling/proposal_generator/__init__.py
|
vghost2008/wml
|
d0c5a1da6c228e321ae59a563e9ac84aa66266ff
|
[
"MIT"
] | 2
|
2021-08-25T16:16:01.000Z
|
2022-02-10T05:21:19.000Z
|
object_detection2/modeling/proposal_generator/__init__.py
|
vghost2008/wml
|
d0c5a1da6c228e321ae59a563e9ac84aa66266ff
|
[
"MIT"
] | 2
|
2019-12-07T09:57:35.000Z
|
2021-09-06T04:58:10.000Z
|
from . import rpn, retinanet,rpn_outputs,rpn_giou_outputs,retinanet_giou_outputs,retinanet_outputs,retinanet_giou_outputs_mc,fcos,fusionpg
from . import fcos_outputs
| 82.5
| 138
| 0.890909
| 24
| 165
| 5.708333
| 0.375
| 0.240876
| 0.291971
| 0.394161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048485
| 165
| 2
| 139
| 82.5
| 0.872611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e0e671d85ed9378e479165c1b45dcd137b560a7b
| 124
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/viper/calculators/calc_crc.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/viper/calculators/calc_crc.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/viper/calculators/calc_crc.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.parts.bobcat.calculators.calc_crc import Calc_CRC_Bobcat
class calc_crc_viper(Calc_CRC_Bobcat):
pass
| 31
| 75
| 0.854839
| 19
| 124
| 5.210526
| 0.578947
| 0.282828
| 0.262626
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08871
| 124
| 4
| 76
| 31
| 0.876106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
1cc94330101204428cf637857883ef2424266eda
| 1,256
|
py
|
Python
|
examples/example_safe_cast.py
|
StasTune/smart-cast
|
deede3538a9d36182f206a1bbdf2624c76d4b579
|
[
"Apache-2.0"
] | null | null | null |
examples/example_safe_cast.py
|
StasTune/smart-cast
|
deede3538a9d36182f206a1bbdf2624c76d4b579
|
[
"Apache-2.0"
] | null | null | null |
examples/example_safe_cast.py
|
StasTune/smart-cast
|
deede3538a9d36182f206a1bbdf2624c76d4b579
|
[
"Apache-2.0"
] | 1
|
2017-03-12T08:36:33.000Z
|
2017-03-12T08:36:33.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @copyright 2017 TUNE, Inc. (http://www.tune.com)
# @namespace smart-cast
from pprintpp import pprint
from safe_cast import (
safe_int,
safe_float,
safe_str,
)
pprint(safe_int(4))
pprint(safe_float(4))
pprint(safe_str(4))
pprint(safe_int('4'))
pprint(safe_float('4'))
pprint(safe_str('4'))
pprint(safe_int(4.0))
pprint(safe_float(4.0))
pprint(safe_str(4.0))
pprint(safe_int('4.0'))
pprint(safe_float('4.0'))
pprint(safe_str('4.0'))
pprint(safe_int('1.0'))
pprint(safe_int('1'))
pprint(safe_int(1.0))
pprint(safe_int(1.00))
pprint(safe_int(1))
pprint(safe_float('4.1'))
pprint(safe_float('4.12'))
pprint(safe_float('4.123'))
pprint(safe_float('4.123', 4))
pprint(safe_float('4.1234', 4))
pprint(safe_float('4.12345', 4))
pprint(safe_float(4.1))
pprint(safe_float(4.12))
pprint(safe_float(4.123))
pprint(safe_float(4.123, 4))
pprint(safe_float(4.1234, 4))
pprint(safe_float(4.12345, 4))
pprint(safe_int('4.1'))
pprint(safe_int('4.12'))
pprint(safe_int('4.123'))
pprint(safe_int('4.123'))
pprint(safe_int('4.1234'))
pprint(safe_int('4.12345'))
pprint(safe_int(4.1))
pprint(safe_int(4.12))
pprint(safe_int(4.123))
pprint(safe_int(4.123))
pprint(safe_int(4.1234))
pprint(safe_int(4.12345))
| 20.590164
| 51
| 0.702229
| 235
| 1,256
| 3.561702
| 0.140426
| 0.489845
| 0.326165
| 0.267622
| 0.826762
| 0.826762
| 0.810036
| 0.810036
| 0.810036
| 0.756272
| 0
| 0.11658
| 0.078025
| 1,256
| 60
| 52
| 20.933333
| 0.606218
| 0.092357
| 0
| 0.085106
| 0
| 0
| 0.066901
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.042553
| 0
| 0.042553
| 0.893617
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
1cee854da9e5aa17ab16a4370255deb2a28bedb3
| 1,784
|
py
|
Python
|
roomai/games/bang/BangAction.py
|
tonyxxq/RoomAI
|
5f28e31e659dd7808127c3c3cc386e6892a93982
|
[
"MIT"
] | 1
|
2018-11-29T01:57:18.000Z
|
2018-11-29T01:57:18.000Z
|
roomai/games/bang/BangAction.py
|
tonyxxq/RoomAI
|
5f28e31e659dd7808127c3c3cc386e6892a93982
|
[
"MIT"
] | null | null | null |
roomai/games/bang/BangAction.py
|
tonyxxq/RoomAI
|
5f28e31e659dd7808127c3c3cc386e6892a93982
|
[
"MIT"
] | null | null | null |
#!/bin/python
#coding:utf-8
from roomai.games.common import AbstractAction
class BangActionCard(AbstractAction):
def __init__(self):
self.__card__ = ""
self.__source__ = ""
self.__first_target__ = ""
self.__second_target__ = ""
@classmethod
def lookup(self, key):
raise NotImplementedError
def __get_card__(self): return self.__card__
card = property(__get_card__, doc="the card used in this action")
def __get_source__(self): return self.__source__
source = property(__get_source__, doc="the id of the player, who issues this action")
def __get_first_target__(self): return self.__first_target__
first_target = property(__get_first_target__,doc = "the first target of this action")
def __get_second_target__(self): return self.__second_target__
second_target = property(__get_second_target__, doc = "the second target of this action")
class BangActionSkill(AbstractAction):
def __init__(self):
self.__skill__ = ""
self.__from__ = ""
self.__first_target__ = ""
self.__second_target__ = ""
@classmethod
def lookup(self, key):
raise NotImplementedError
def __get_skill__(self): return self.__skill__
skill = property(__get_skill__, doc="the card used in this action")
def __get_source__(self): return self.__source__
source = property(__get_source__, doc="the id of the player, who issues this action")
def __get_first_target__(self): return self.__first_target__
first_target = property(__get_first_target__, doc="the first target of this action")
def __get_second_target__(self): return self.__second_target__
second_target = property(__get_second_target__, doc="the second target of this action")
| 31.857143
| 93
| 0.71861
| 223
| 1,784
| 4.977578
| 0.188341
| 0.118919
| 0.100901
| 0.086486
| 0.809009
| 0.756757
| 0.756757
| 0.756757
| 0.756757
| 0.756757
| 0
| 0.000697
| 0.195628
| 1,784
| 56
| 94
| 31.857143
| 0.772822
| 0.013453
| 0
| 0.685714
| 0
| 0
| 0.153496
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.342857
| false
| 0
| 0.028571
| 0.228571
| 0.657143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
e812d49dadcc03ec3d97f06dc3fda6a3a26e3dfc
| 11,370
|
py
|
Python
|
test/integration/test_merge.py
|
VitalyRomanov/py2neo
|
2d0683cf2ab8b77b0c5bbba4eade0003c68d5905
|
[
"Apache-2.0"
] | null | null | null |
test/integration/test_merge.py
|
VitalyRomanov/py2neo
|
2d0683cf2ab8b77b0c5bbba4eade0003c68d5905
|
[
"Apache-2.0"
] | null | null | null |
test/integration/test_merge.py
|
VitalyRomanov/py2neo
|
2d0683cf2ab8b77b0c5bbba4eade0003c68d5905
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2011-2021, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pytest import raises
from py2neo import Node, Relationship
from py2neo.data.operations import UniquenessError
def test_can_merge_node_that_does_not_exist(graph, make_unique_id):
label = make_unique_id()
alice = Node(label, name="Alice")
old_order = len(graph.nodes)
graph.merge(alice, label, "name")
assert alice.graph is graph
assert alice.identity is not None
assert graph.exists(alice)
new_order = len(graph.nodes)
assert new_order == old_order + 1
def test_can_merge_node_that_does_exist(graph, make_unique_id):
label = make_unique_id()
graph.create(Node(label, name="Alice"))
alice = Node(label, name="Alice")
old_order = len(graph.nodes)
graph.merge(alice, label, "name")
assert alice.graph is graph
assert alice.identity is not None
assert graph.exists(alice)
new_order = len(graph.nodes)
assert new_order == old_order
def test_cannot_merge_node_where_two_exist(graph, make_unique_id):
label = make_unique_id()
graph.create(Node(label, name="Alice"))
graph.create(Node(label, name="Alice"))
alice = Node(label, name="Alice")
with raises(UniquenessError):
graph.merge(alice, label, "name")
def test_can_merge_bound_node(graph, make_unique_id):
label = make_unique_id()
alice = Node(label, name="Alice")
graph.create(alice)
old_order = len(graph.nodes)
graph.merge(alice, label, "name")
assert alice.graph is graph
assert alice.identity is not None
assert graph.exists(alice)
new_order = len(graph.nodes)
assert new_order == old_order
def test_can_merge_node_that_does_not_exist_on_specific_label_and_key(graph, make_unique_id):
label = make_unique_id()
label_2 = make_unique_id()
alice = Node(label, label_2, name="Alice", age=33)
old_order = len(graph.nodes)
graph.merge(alice, label, "name")
assert alice.graph is graph
assert alice.identity is not None
assert graph.exists(alice)
new_order = len(graph.nodes)
assert new_order == old_order + 1
def test_can_merge_node_that_does_exist_on_specific_label_and_key_with_extra_properties(graph, make_unique_id):
label = make_unique_id()
label_2 = make_unique_id()
graph.create(Node(label, name="Alice"))
alice = Node(label, label_2, name="Alice", age=33)
old_order = len(graph.nodes)
graph.merge(alice, label, "name")
assert alice.graph is graph
assert alice.identity is not None
assert graph.exists(alice)
new_order = len(graph.nodes)
assert new_order == old_order
def test_can_merge_node_that_does_exist_on_specific_label_and_key_with_other_properties(graph, make_unique_id):
label = make_unique_id()
label_2 = make_unique_id()
graph.create(Node(label, name="Alice", age=44))
alice = Node(label, label_2, name="Alice", age=33)
old_order = len(graph.nodes)
graph.merge(alice, label, "name")
assert alice.graph is graph
assert alice.identity is not None
assert graph.exists(alice)
new_order = len(graph.nodes)
assert new_order == old_order
def test_can_merge_relationship_that_does_not_exist(graph, make_unique_id):
label = make_unique_id()
alice = Node(label, name="Alice")
bob = Node(label, name="Bob")
ab = Relationship(alice, "KNOWS", bob)
old_order = len(graph.nodes)
old_size = len(graph.relationships)
graph.merge(ab, label, "name")
assert alice.graph is graph
assert alice.identity is not None
assert bob.graph is graph
assert bob.identity is not None
assert ab.graph is graph
assert ab.identity is not None
assert graph.exists(alice | bob | ab)
new_order = len(graph.nodes)
new_size = len(graph.relationships)
assert new_order == old_order + 2
assert new_size == old_size + 1
def test_can_merge_relationship_where_one_node_exists(graph, make_unique_id):
label = make_unique_id()
alice = Node(label, name="Alice")
graph.create(alice)
bob = Node(label, name="Bob")
ab = Relationship(alice, "KNOWS", bob)
old_order = len(graph.nodes)
old_size = len(graph.relationships)
graph.merge(ab, label, "name")
assert alice.graph is graph
assert alice.identity is not None
assert bob.graph is graph
assert bob.identity is not None
assert ab.graph is graph
assert ab.identity is not None
assert graph.exists(alice | bob | ab)
new_order = len(graph.nodes)
new_size = len(graph.relationships)
assert new_order == old_order + 1
assert new_size == old_size + 1
def test_can_merge_relationship_where_all_exist(graph, make_unique_id):
label = make_unique_id()
alice = Node(label, name="Alice")
graph.create(Relationship(alice, "KNOWS", Node(label, name="Bob")))
bob = Node(label, name="Bob")
ab = Relationship(alice, "KNOWS", bob)
old_order = len(graph.nodes)
old_size = len(graph.relationships)
graph.merge(ab, label, "name")
assert alice.graph is graph
assert alice.identity is not None
assert bob.graph is graph
assert bob.identity is not None
assert ab.graph is graph
assert ab.identity is not None
assert graph.exists(alice | bob | ab)
new_order = len(graph.nodes)
new_size = len(graph.relationships)
assert new_order == old_order
assert new_size == old_size
def test_can_merge_relationship_with_space_in_name(graph, make_unique_id):
label = make_unique_id()
alice = Node(label, name="Alice")
bob = Node(label, name="Bob")
ab = Relationship(alice, "MARRIED TO", bob)
old_order = len(graph.nodes)
old_size = len(graph.relationships)
graph.merge(ab, label, "name")
assert alice.graph is graph
assert alice.identity is not None
assert bob.graph is graph
assert bob.identity is not None
assert ab.graph is graph
assert ab.identity is not None
assert graph.exists(alice | bob | ab)
new_order = len(graph.nodes)
new_size = len(graph.relationships)
assert new_order == old_order + 2
assert new_size == old_size + 1
def test_cannot_merge_non_subgraph(graph, make_unique_id):
with raises(TypeError):
graph.merge("this string is definitely not a subgraph")
def test_can_merge_three_nodes_where_none_exist(graph, make_unique_id):
label = make_unique_id()
alice = Node(label, name="Alice")
bob = Node(label, name="Bob")
carol = Node(label, name="Carol")
old_order = len(graph.nodes)
subgraph = alice | bob | carol
graph.merge(subgraph, label, "name")
for node in subgraph.nodes:
assert node.graph is graph
assert node.identity is not None
assert graph.exists(node)
new_order = len(graph.nodes)
assert new_order == old_order + 3
def test_can_merge_three_nodes_where_one_exists(graph, make_unique_id):
label = make_unique_id()
alice = Node(label, name="Alice")
bob = Node(label, name="Bob")
carol = Node(label, name="Carol")
graph.create(alice)
old_order = len(graph.nodes)
subgraph = alice | bob | carol
graph.merge(subgraph, label, "name")
for node in subgraph.nodes:
assert node.graph is graph
assert node.identity is not None
assert graph.exists(node)
new_order = len(graph.nodes)
assert new_order == old_order + 2
def test_can_merge_three_nodes_where_two_exist(graph, make_unique_id):
label = make_unique_id()
alice = Node(label, name="Alice")
bob = Node(label, name="Bob")
carol = Node(label, name="Carol")
graph.create(alice | bob)
old_order = len(graph.nodes)
subgraph = alice | bob | carol
graph.merge(subgraph, label, "name")
for node in subgraph.nodes:
assert node.graph is graph
assert node.identity is not None
assert graph.exists(node)
new_order = len(graph.nodes)
assert new_order == old_order + 1
def test_can_merge_three_nodes_where_three_exist(graph, make_unique_id):
label = make_unique_id()
alice = Node(label, name="Alice")
bob = Node(label, name="Bob")
carol = Node(label, name="Carol")
graph.create(alice | bob | carol)
old_order = len(graph.nodes)
subgraph = alice | bob | carol
graph.merge(subgraph, label, "name")
for node in subgraph.nodes:
assert node.graph is graph
assert node.identity is not None
assert graph.exists(node)
new_order = len(graph.nodes)
assert new_order == old_order
def test_can_merge_long_straight_walkable(graph, make_unique_id):
label = make_unique_id()
a = Node(label, name="Alice")
b = Node(label, name="Bob")
c = Node(label, name="Carol")
d = Node(label, name="Dave")
ab = Relationship(a, "KNOWS", b)
cb = Relationship(c, "KNOWS", b)
cd = Relationship(c, "KNOWS", d)
graph.create(a)
old_order = len(graph.nodes)
old_size = len(graph.relationships)
graph.merge(ab + cb + cd, label, "name")
new_order = len(graph.nodes)
new_size = len(graph.relationships)
assert new_order == old_order + 3
assert new_size == old_size + 3
def test_can_merge_long_walkable_with_repeats(graph, make_unique_id):
label = make_unique_id()
a = Node(label, name="Alice")
b = Node(label, name="Bob")
c = Node(label, name="Carol")
d = Node(label, name="Dave")
ab = Relationship(a, "KNOWS", b)
cb = Relationship(c, "KNOWS", b)
cd = Relationship(c, "KNOWS", d)
bd = Relationship(b, "KNOWS", d)
graph.create(a)
old_order = len(graph.nodes)
old_size = len(graph.relationships)
graph.merge(ab + cb + cb + bd + cd, label, "name")
new_order = len(graph.nodes)
new_size = len(graph.relationships)
assert new_order == old_order + 3
assert new_size == old_size + 4
def test_cannot_merge_without_arguments(graph, make_unique_id):
node = Node()
with raises(ValueError):
graph.merge(node)
def test_can_merge_with_arguments(graph, make_unique_id):
label_a = make_unique_id()
label_b = make_unique_id()
a = Node(label_a, a=1)
b = Node(label_b, b=2)
graph.create(a | b)
a_id = a.identity
b_id = b.identity
node = Node(label_a, label_b, a=1, b=2)
graph.merge(node, label_a, "a")
assert node.identity == a_id
assert node.identity != b_id
def test_merge_with_magic_values_overrides_arguments(graph, make_unique_id):
label_a = make_unique_id()
label_b = make_unique_id()
a = Node(label_a, a=1)
b = Node(label_b, b=2)
graph.create(a | b)
a_id = a.identity
b_id = b.identity
node = Node(label_a, label_b, a=1, b=2)
node.__primarylabel__ = label_b
node.__primarykey__ = "b"
graph.merge(node, label_a, "a")
assert node.identity != a_id
assert node.identity == b_id
| 33.052326
| 111
| 0.690501
| 1,705
| 11,370
| 4.392375
| 0.089736
| 0.066097
| 0.072106
| 0.076913
| 0.860061
| 0.842302
| 0.839097
| 0.825744
| 0.825744
| 0.822139
| 0
| 0.005741
| 0.203342
| 11,370
| 343
| 112
| 33.148688
| 0.821042
| 0.052683
| 0
| 0.833333
| 0
| 0
| 0.033191
| 0
| 0
| 0
| 0
| 0
| 0.297872
| 1
| 0.074468
| false
| 0
| 0.010638
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c855c7a42d4e9443b727e06c37c57d3e9671532
| 4,403
|
py
|
Python
|
pkgs/sdk-pkg/src/genie/libs/sdk/triggers/ha/issu/nxos/issu.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 94
|
2018-04-30T20:29:15.000Z
|
2022-03-29T13:40:31.000Z
|
pkgs/sdk-pkg/src/genie/libs/sdk/triggers/ha/issu/nxos/issu.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 67
|
2018-12-06T21:08:09.000Z
|
2022-03-29T18:00:46.000Z
|
pkgs/sdk-pkg/src/genie/libs/sdk/triggers/ha/issu/nxos/issu.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 49
|
2018-06-29T18:59:03.000Z
|
2022-03-10T02:07:59.000Z
|
'''NXOS implementation for ISSU triggers'''
# Python
import sys
import argparse
import logging
from os.path import basename
# ATS
from pyats import aetest
from pyats.utils.objects import R
from pyats.utils.objects import Not
# Genie Libs
from genie.libs.sdk.libs.utils.mapping import Mapping
from genie.libs.sdk.triggers.ha.ha import TriggerIssu as CommonIssu
log = logging.getLogger(__name__)
# Trigger required data settings
# Which key to exclude for Platform Ops comparison
platform_exclude = ['maker', 'rp_uptime', 'sn', 'main_mem',
'switchover_reason', 'config_register',
'image', 'disk_used_space', 'disk_free_space',
'version', 'rp_boot_image']
platform_exclude_lxc = ['maker', 'rp_uptime', 'sn', 'main_mem',
'switchover_reason', 'config_register',
'image', 'disk_used_space', 'disk_free_space',
'version', 'rp_boot_image', 'rp']
class TriggerIssuLxc(CommonIssu):
"""Do LXC ISSU on device."""
__description__ = """"Do LXC ISSU on device.
trigger_datafile:
Mandatory:
timeout:
max_time (`int`): Maximum wait time for the trigger,
in second. Default: 180
interval (`int`): Wait time between iteration when looping is needed,
in second. Default: 15
steps:
1. Learn Platform Ops object and store the state of active rp, otherwise, SKIP the trigger
2. Do Issue on device.
3. Learn Platform Ops again and the ops are the same as the Ops in step 1
"""
# Parse argv for '--issu_upgrade_image'
def parse_args(self, argv):
parser = argparse.ArgumentParser()
parser.add_argument('--issu_upgrade_image',
default=None,
help='URL path of the ISSU upgrade image')
args, unknown = parser.parse_known_args(argv)
self.parameters['upgrade_image'] = args.issu_upgrade_image
mapping = Mapping(
requirements={
'ops.platform.platform.Platform': {
'requirements': [
['slot', 'rp', '(?P<rp>.*)', 'state',
'(?P<status>active)']
],
'exclude': platform_exclude_lxc}},
verify_ops={
'ops.platform.platform.Platform': {
'requirements': [
['slot', 'rp', Not('(?P<rp>.*)'), 'state',
'(?P<status>active)']
],
'exclude': platform_exclude_lxc}},
num_values={'rp': 'all', 'status': 'all'})
class TriggerIssuNative(CommonIssu):
"""Do Native ISSU on device."""
__description__ = """"Do Native ISSU on device.
trigger_datafile:
Mandatory:
timeout:
max_time (`int`): Maximum wait time for the trigger,
in second. Default: 180
interval (`int`): Wait time between iteration when looping is needed,
in second. Default: 15
steps:
1. Learn Platform Ops object and store the state of active rp, otherwise, SKIP the trigger
2. Do Issue on device.
3. Learn Platform Ops again and the ops are the same as the Ops in step 1
"""
# Parse argv for '--issu_upgrade_image'
def parse_args(self, argv):
parser = argparse.ArgumentParser()
parser.add_argument('--issu_upgrade_image',
default=None,
help='URL path of the ISSU upgrade image')
args, unknown = parser.parse_known_args(argv)
self.parameters['upgrade_image'] = args.issu_upgrade_image
mapping = Mapping(
requirements={
'ops.platform.platform.Platform': {
'requirements': [
['slot', 'rp', '(?P<active_rp>.*)', 'state', 'active']],
'all_keys': False,
'exclude': platform_exclude}},
verify_ops={
'ops.platform.platform.Platform': {
'requirements': [
['slot', 'rp', '(?P<active_rp>.*)', 'state', 'active']],
'all_keys': False,
'exclude': platform_exclude}},
num_values={'active_rp': 1})
| 36.691667
| 99
| 0.551215
| 470
| 4,403
| 5.002128
| 0.285106
| 0.051042
| 0.054445
| 0.045938
| 0.80051
| 0.746916
| 0.746916
| 0.746916
| 0.746916
| 0.720119
| 0
| 0.006511
| 0.33727
| 4,403
| 119
| 100
| 37
| 0.799178
| 0.060186
| 0
| 0.755556
| 0
| 0
| 0.468416
| 0.029155
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022222
| false
| 0
| 0.1
| 0
| 0.188889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c979ef9ecef3903f7facb2682322bbdbe5f9a3f
| 144
|
py
|
Python
|
sim.py
|
scizzorz/yak-simulator
|
d3d2d4a9748f71a5bfcba2e52e4e8157b2bb6cb6
|
[
"MIT"
] | null | null | null |
sim.py
|
scizzorz/yak-simulator
|
d3d2d4a9748f71a5bfcba2e52e4e8157b2bb6cb6
|
[
"MIT"
] | null | null | null |
sim.py
|
scizzorz/yak-simulator
|
d3d2d4a9748f71a5bfcba2e52e4e8157b2bb6cb6
|
[
"MIT"
] | null | null | null |
from base import client
from base import config
def compose(msg):
return client.compose_yak(msg, config['lat'], config['long'], handle=True)
| 24
| 76
| 0.75
| 22
| 144
| 4.863636
| 0.636364
| 0.149533
| 0.261682
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 144
| 5
| 77
| 28.8
| 0.849206
| 0
| 0
| 0
| 0
| 0
| 0.048611
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
98bab4d543bf634bd3a3f24d956038ea80cc60e6
| 92,276
|
py
|
Python
|
test/tree_disconnect_test.py
|
abhilashabhardwaj/pike
|
a1ad05b37231d8ac0a0442ab8d32a363e75ada9a
|
[
"Apache-2.0"
] | null | null | null |
test/tree_disconnect_test.py
|
abhilashabhardwaj/pike
|
a1ad05b37231d8ac0a0442ab8d32a363e75ada9a
|
[
"Apache-2.0"
] | null | null | null |
test/tree_disconnect_test.py
|
abhilashabhardwaj/pike
|
a1ad05b37231d8ac0a0442ab8d32a363e75ada9a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) Calsoft. All rights reserved.
#
# Module Name:
#
# tree_disconnect_test.py
#
# Abstract:
#
# Test cases for Tree Disconnect Command.
#
# Authors: Prayas Gupta (prayas.gupta@calsoftinc.com)
#
import pike.model
import pike.smb2
import pike.test
import utils
import array
import random
import itertools
class TreeDisconnectTest(pike.test.PikeTest):
def test_01_valid_input(self):
try:
print "\n------------------SMB2_TREE_DISCONNECT_TEST01-------------------------------------"
print "TC 01 - Testing SMB2_TREE_DISCONNECT with valid inputs"
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect is successfull."
print "Sending tree disconnect request..."
conv_obj=utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree,structure_size=4,reserved=0)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
actual_status = str(res[0].status)
except Exception as e:
actual_status = str(e)
print "Actual status: ",actual_status
self.assertIn(expected_status,actual_status)
print "TC 01 Passed"
def test_02_less_strucsize(self):
try:
print "\n------------------SMB2_TREE_DISCONNECT_TEST02-------------------------------------"
print "TC 02 - Testing SMB2_TREE_DISCONNECT for structureSize less than valid structureSize"
expected_status = "STATUS_INVALID_PARAMETER"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect is successfull."
print "Sending Tree disconnect request..."
conv_obj=utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree,structure_size=2,reserved=0)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
actual_status = str(res[0].status)
except Exception as e:
actual_status = str(e)
print "Actual status: ",actual_status
self.assertIn(expected_status,actual_status)
print "TC 02 Passed"
def test_03_greater_strucsize(self):
try:
print "\n------------------SMB2_TREE_DISCONNECT_TEST03-------------------------------------"
print "TC 03 - Testing SMB2_TREE_DISCONNECT for structureSize greater than valid structureSize"
expected_status = "STATUS_INVALID_PARAMETER"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect is successfull."
print "Sending Tree disconnect request..."
conv_obj=utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree,structure_size=8,reserved=0)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
actual_status = str(res[0].status)
except Exception as e:
actual_status = str(e)
print "Actual status: ",actual_status
self.assertIn(expected_status,actual_status)
print "TC 03 Passed"
def test_04_greater_reserved(self):
try:
print "\n------------------SMB2_TREE_DISCONNECT_TEST04-------------------------------------"
print "TC 04 - Testing SMB2_TREE_DISCONNECT for reserved value greater than zero."
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect is successfull."
print "Sending Tree disconnect request..."
conv_obj=utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree,structure_size=4,reserved=8)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
actual_status = str(res[0].status)
except Exception as e:
actual_status = str(e)
print "Actual status: ",actual_status
self.assertIn(expected_status,actual_status)
print "TC 04 Passed"
def test_05_tree_disc_consecutive(self):
try:
print "\n------------------SMB2_TREE_DISCONNECT_TEST05-------------------------------------"
print "TC 05 - Testing SMB2_TREE_DISCONNECT for consecutive tree disconnect requests."
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect is successfull."
print "Sending first TREE_DISCONNECT request..."
conv_obj=utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect first request successfull."
print "Sending second TREE_DISCONNECT request..."
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res2 = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect second request successfull."
actual_status = str(res2[0].status)
except Exception as e:
actual_status = str(e)
print "Actual status: ",actual_status
self.assertIn(expected_status,actual_status)
print "TC 05 Passed"
def test_06_tree_disc_close_fh(self):
try:
print "\n------------------SMB2_TREE_DISCONNECT_TEST06-------------------------------------"
print "TC 06 - Testing SMB2_TREE_DISCONNECT followed by closing file handle."
expected_status = "STATUS_FILE_CLOSED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect is successfull."
print "Create a file TREE_DISCONNECT_TEST06 for testing"
file_handle = chan.create(tree, "TREE_DISCONNECT_TEST06").result()
print "File creation is successfull."
print "Sending TREE_DISCONNECT request..."
conv_obj=utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect request successfull."
print "Closing file handle"
chan.close(file_handle)
actual_status = "STATUS_SUCCESS"
print "Unexpected success. File handle closed successfully."
except Exception as e:
actual_status = str(e)
print "Actual status: ",actual_status
self.assertIn(expected_status,actual_status)
print "TC 06 Passed"
def test_07_tree_disconnect_open_file(self):
try:
print "\n------------------SMB2_TREE_DISCONNECT_TEST07-------------------------------------"
print "TC 07 - Create file TREE_DISCONNECT_TEST07 and do the Treedisconnect. Send create open request for TREE_DISCONNECT_TEST07 file."
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect is successfull."
print "Create a file TREE_DISCONNECT_TEST07 for testing"
file_handle = chan.create(tree, "TREE_DISCONNECT_TEST07").result()
print "File creation is successfull."
print "Sending TREE_DISCONNECT request..."
conv_obj=utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfully."
print "Sending create Open request on the file TREE_DISCONNECT_TEST07"
file_handle2 = chan.create(tree, "TREE_DISCONNECT_TEST07", disposition=pike.smb2.FILE_OPEN).result()
actual_status = "UNEXPECTED_SUCCESS"
print "Unexpected success. File TREE_DISCONNECT_TEST07 opened"
except Exception as e:
actual_status = str(e)
print "Actual status: ",actual_status
self.assertIn(expected_status,actual_status)
print "TC 07 Passed"
def test_08_tree_disconnect_read(self):
try:
print "\n------------------TREE_DISCONNECT_TEST08-------------------------------------"
print "TC 08 - Create file TREE_DISCONNECT_TEST08 and do the treedisconnect. Send read request on the TREE_DISCONNECT_TEST08 file."
buffer = "testing 123"
expected_status = "STATUS_FILE_CLOSED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect is successfull."
print "Create a file TREE_DISCONNECT_TEST08 for testing"
file_handle = chan.create(tree, "TREE_DISCONNECT_TEST08").result()
print "File creation is successfull."
print "Sending write request"
bytes_written = chan.write(file_handle,0,buffer)
print "Written data onto the file"
print "Sending TREE_DISCONNECT request..."
conv_obj=utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Sending read request"
read_packet = conv_obj.read(chan, file_handle)
read_res = conv_obj.transceive(chan, read_packet)
actual_status = str(read_res[0].status)
print "Unexpected success : Successfully read data from the file"
except Exception as e:
actual_status = str(e)
print "Actual status: ",actual_status
self.assertIn(expected_status,actual_status)
print "TC 08 Passed"
def test_09_tree_disconnect_write(self):
try:
print "\n------------------TREE_DISCONNECT_TEST09-------------------------------------"
print "TC 09 - Create file TREE_DISCONNECT_TEST09 and do the treedisconnect. Send write request on the TREE_DISCONNECT_TEST09 file."
buffer = "testing 123"
expected_status = "STATUS_FILE_CLOSED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST09 for testing"
file_handle = chan.create(tree, "TREE_DISCONNECT_TEST09").result()
print "File created."
print "Sending TREE_DISCONNECT request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Sending write request..."
bytes_written = chan.write(file_handle, 0, buffer)
actual_status = "STATUS_SUCCESS"
print "Unexpected success. Written data onto the file"
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 09 Passed"
def test_10_tree_disconnect_invalid_treeid(self):
try:
print "\n------------------TREE_DISCONNECT_TEST10-------------------------------------"
print "TC 10 - Testing TREE_DISCONNECT with inavlid treeid"
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
correct_tree_id = tree.tree_id
print "Tree ID for first tree connect is :",correct_tree_id
print "Sending TREE_DISCONNECT request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
tree_disconnect_req[0].tree_id = correct_tree_id + 6
print "Tree disconnect after setting tree id ",tree_disconnect_req
res = conv_obj.transceive(chan, tree_disconnect_req)
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 10 Passed"
def test_11_tree_disconnect_set(self):
try:
print "\n------------------TREE_DISCONNECT_TEST11-------------------------------------"
print "TC 11 - Testing TREE_DISCONNECT request followed by set info request."
expected_status = "STATUS_FILE_CLOSED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST11 for testing"
file_handle = chan.create(tree, "TREE_DISCONNECT_TEST11").result()
print "File created."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Sending set_info request on TREE_DISCONNECT_TEST11."
requested_file_attr = pike.smb2.FILE_ATTRIBUTE_READONLY
with chan.set_file_info(file_handle, pike.smb2.FileBasicInformation) as file_info:
file_info.file_attributes = requested_file_attr
if requested_file_attr == file_info.file_attributes:
actual_status = "UNEXPECTED_SUCCESS"
print "Unexpected success. File attribute information set on TREE_DISCONNECT_TEST11."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 11 Passed"
def test_12_tree_disconnect_query_file(self):
try:
print "\n------------------TREE_DISCONNECT_TEST12-------------------------------------"
print "TC 12 - Testing TREE_DISCONNECT followed by query file info request."
expected_status = "STATUS_FILE_CLOSED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST12 for testing"
file_handle = chan.create(tree, "TREE_DISCONNECT_TEST12").result()
print "File created."
print "Sending TREE_DISCONNECT request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Querying file info on TREE_DISCONNECT_TEST12 file"
info = chan.query_file_info(file_handle, pike.smb2.FILE_ALL_INFORMATION)
actual_status = str(info.status)
print "Unexpected success. Query file info on TREE_DISCONNECT_TEST12 done."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 12 Passed"
def test_13_tree_disconnect_durable_open(self):
    # TC 13: open TREE_DISCONNECT_TEST13 requesting a durable handle,
    # disconnect the tree, then attempt to re-open the file re-using the
    # same (now invalid) session/tree; expect STATUS_NETWORK_NAME_DELETED.
    try:
        print "\n------------------TREE_DISCONNECT_TEST13-------------------------------------"
        print "TC 13 - Create file TREE_DISCONNECT_TEST13 with durable set and do the tree disconnect.Send create open request on TREE_DISCONNECT_TEST13 file with same session and treeid ."
        expected_status = "STATUS_NETWORK_NAME_DELETED"
        print "Expected status: ", expected_status
        print "Creating session and tree connect..."
        chan, tree = self.tree_connect()
        print "Session setup and Tree connect successfull."
        print "Create a file TREE_DISCONNECT_TEST13 for testing"
        conv_obj = utils.Convenience()
        # First open: batch oplock + durable=True, so the create response
        # should carry a DurableHandleResponse create context.
        create_tmp1, create_resp1 = conv_obj.create(chan, tree,"TREE_DISCONNECT_TEST13",
                                                    access=pike.smb2.GENERIC_READ,
                                                    share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
                                                    disposition=pike.smb2.FILE_OPEN_IF,
                                                    attributes=pike.smb2.FILE_ATTRIBUTE_TEMPORARY,
                                                    options=pike.smb2.FILE_NO_EA_KNOWLEDGE,
                                                    name_offset=120,
                                                    name_length=26,
                                                    oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_BATCH,
                                                    durable=True)
        file_handle1 = create_tmp1.result()
        create_result1 = create_resp1[0].result()
        print "Verifying durable handle..."
        # The durable handle context must be present in the first response.
        assert "DurableHandleResponse" in str(create_result1[0].children), "Durable handle response not found in Create response."
        print "Durable handle validation has passed."
        print "File created."
        print "Sending TREE_DISCONNECT request..."
        tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
        res = conv_obj.transceive(chan, tree_disconnect_req)
        print "TREE_DISCONNECT successfull."
        print "Sending create Open request on the file TREE_DISCONNECT_TEST13..."
        # Second open re-uses the disconnected tree object; the server is
        # expected to reject it, landing us in the except block below.
        create_tmp2, create_resp2 = conv_obj.create(chan, tree,
                                                    'TREE_DISCONNECT_TEST13',
                                                    access=pike.smb2.FILE_WRITE_DATA,
                                                    attributes=pike.smb2.FILE_ATTRIBUTE_TEMPORARY,
                                                    share=pike.smb2.FILE_SHARE_WRITE,
                                                    disposition=pike.smb2.FILE_OPEN,
                                                    name_offset=120,
                                                    name_length=26,
                                                    options=pike.smb2.FILE_NO_EA_KNOWLEDGE)
        file_handle2 = create_tmp2.result()
        print "File TREE_DISCONNECT_TEST13 opened."
        create_result2 = create_resp2[0].result()
        print "Verifying durable handle..."
        assert "DurableHandleResponse" in str(create_result2[0].children), "Durable handle response not found in Create response."
        print "Durable reconnect validation has passed."
        # Only reached on unexpected success; records the create status for
        # the assertion below.
        actual_status = str(create_result2.status)
        print "Unexpected success. File open successfull."
    except Exception as e:
        actual_status = str(e)
    print "Actual status: ", actual_status
    self.assertIn(expected_status, actual_status)
    print "TC 13 Passed"
def test_14_tree_disconnect_invalid_session_id(self):
try:
print "\n------------------TREE_DISCONNECT_TEST14-------------------------------------"
print "TC 14 - Testing SMB2_TREE_DISCONNECT with invalid session id."
expected_status = "STATUS_USER_SESSION_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Sending TREE_DISCONNECT request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree,structure_size=4,reserved=0)
tree_disconnect_req[0].session_id = 1234561234567
res = conv_obj.transceive(chan, tree_disconnect_req)
actual_status = str(res[0].status)
print "Unexpected success. Tree Disconnect successfull."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 14 Passed"
def test_15_tree_disconnect_shared_lock(self):
try:
print "\n------------------TREE_DISCONNECT_TEST15-------------------------------------"
print "TC 15 - Lock file with SMB2_LOCKFLAG_SHARED_LOCK and do the tree disconnect. With old session and new tree_id try SMB2_LOCKFLAG_EXCLUSIVE_LOCK on same byte ranges."
expected_status = "STATUS_SUCCESS"
lock1 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_SHARED_LOCK)]
lock2 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_EXCLUSIVE_LOCK)]
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST15 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST15").result()
print "File created."
print "Sending Lock request with SMB2_LOCKFLAG_SHARED_LOCK flag..."
chan.lock(file_handle1, lock1).result()
print "File locked with shared lock on requested byte ranges."
print "Sending tree disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successful."
print "Open file TREE_DISCONNECT_TEST15 for testing"
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST15", disposition=pike.smb2.FILE_OPEN).result()
print "File TREE_DISCONNECT_TEST15 opened."
print "Sending Lock request with SMB2_LOCKFLAG_EXCLUSIVE_LOCK flag..."
lock_res2 = chan.lock(file_handle2, lock2).result()
actual_status = str(lock_res2.status)
print "File locked with shared lock on requested byte ranges."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 15 Passed"
def test_16_tree_disconnect_exclusive_lock(self):
try:
print "\n------------------TREE_DISCONNECT_TEST16-------------------------------------"
print "TC 16 - Lock file with SMB2_LOCKFLAG_EXCLUSIVE_LOCK and do the tree disconnect.With old session and new tree_id try SMB2_LOCKFLAG_EXCLUSIVE_LOCK on same byte ranges."
expected_status = "STATUS_SUCCESS"
lock1 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_EXCLUSIVE_LOCK)]
lock2 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_EXCLUSIVE_LOCK)]
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successful."
print "Create a file TREE_DISCONNECT_TEST16 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST16").result()
print "File created."
print "Sending Lock request with SMB2_LOCKFLAG_EXCLUSIVE_LOCK flag..."
chan.lock(file_handle1, lock1).result()
print "File locked with exclusive lock on requested byte ranges."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successful."
print "Open file TREE_DISCONNECT_TEST16 for testing"
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST16", disposition=pike.smb2.FILE_OPEN).result()
print "File TREE_DISCONNECT_TEST16 opened."
print "Sending Lock request with SMB2_LOCKFLAG_EXCLUSIVE_LOCK flag..."
lock_res2 = chan.lock(file_handle2, lock2).result()
actual_status = str(lock_res2.status)
print "File locked with shared lock on requested byte ranges."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 16 Passed"
def test_17_tree_disconnect_shared_lock(self):
try:
print "\n------------------TREE_DISCONNECT_TEST17-------------------------------------"
print "TC 17 - Lock file with SMB2_LOCKFLAG_SHARED_LOCK|SMB2_LOCKFLAG_FAIL_IMMEDIATELY and do the tree disconnect.With old session and new tree_id try SMB2_LOCKFLAG_EXCLUSIVE_LOCK on same byte ranges."
expected_status = "STATUS_SUCCESS"
lock1 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_SHARED_LOCK | pike.smb2.SMB2_LOCKFLAG_FAIL_IMMEDIATELY)]
lock2 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_EXCLUSIVE_LOCK)]
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST18 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST18").result()
print "File created."
print "Sending Lock request with SMB2_LOCKFLAG_SHARED_LOCK|SMB2_LOCKFLAG_FAIL_IMMEDIATELY flag..."
chan.lock(file_handle1, lock1).result()
print "File locked with shared lock on requested byte ranges."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second Tree connect successfull."
print "Open file TREE_DISCONNECT_TEST18 for testing"
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST18", disposition=pike.smb2.FILE_OPEN).result()
print "File TREE_DISCONNECT_TEST18 opened."
print "Sending Lock request with SMB2_LOCKFLAG_EXCLUSIVE_LOCK flag..."
lock_res2 = chan.lock(file_handle2, lock2).result()
actual_status = str(lock_res2.status)
print "File locked with shared lock on requested byte ranges."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 17 Passed"
def test_18_tree_disconnect_exclusive_lock(self):
try:
print "\n------------------TREE_DISCONNECT_TEST18-------------------------------------"
print "TC 18 - Lock file with SMB2_LOCKFLAG_EXCLUSIVE_LOCK|SMB2_LOCKFLAG_FAIL_IMMEDIATELY and do the tree disconnect. With old session and new tree_id try SMB2_LOCKFLAG_EXCLUSIVE_LOCK on same byte ranges."
expected_status = "STATUS_SUCCESS"
lock1 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_EXCLUSIVE_LOCK | pike.smb2.SMB2_LOCKFLAG_FAIL_IMMEDIATELY)]
lock2 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_EXCLUSIVE_LOCK)]
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST18 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST18").result()
print "File created."
print "Sending Lock request with SMB2_LOCKFLAG_EXCLUSIVE_LOCK|SMB2_LOCKFLAG_FAIL_IMMEDIATELY flag..."
chan.lock(file_handle1, lock1).result()
print "File locked with exclusive lock on requested byte ranges."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second Tree connect successfull."
print "Open file TREE_DISCONNECT_TEST18 for testing"
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST18", disposition=pike.smb2.FILE_OPEN).result()
print "File TREE_DISCONNECT_TEST18 opened."
print "Sending Lock request with SMB2_LOCKFLAG_EXCLUSIVE_LOCK flag..."
lock_res2 = chan.lock(file_handle2, lock2).result()
actual_status = str(lock_res2.status)
print "File locked with shared lock on requested byte ranges."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 18 Passed"
def test_19_tree_disconnect_unlock(self):
try:
print "\n------------------TREE_DISCONNECT_TEST19-------------------------------------"
print "TC 19 - Lock file with SMB2_LOCKFLAG_EXCLUSIVE_LOCK and do the tree disconnect.With old session and new treeid try to unlock same byte ranges."
expected_status = "STATUS_RANGE_NOT_LOCKED"
lock1 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_EXCLUSIVE_LOCK)]
lock2 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_UN_LOCK)]
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT _TEST19 for testing"
file_handle = chan.create(tree, "TREE_DISCONNECT_TEST19", disposition=pike.smb2.FILE_OPEN_IF).result()
print "File created."
print "Sending Lock request with SMB2_LOCKFLAG_SHARED_LOCK..."
lock_res1 = chan.lock(file_handle, lock1).result()
print "File locked with shared lock on requested byte ranges."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successfull."
print "Open file TREE_DISCONNECT_TEST19 for testing"
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST19", disposition=pike.smb2.FILE_OPEN).result()
print "File TREE_DISCONNECT_TEST19 opened."
print "Sending Lock request with SMB2_LOCKFLAG_UN_LOCK flag..."
lock_res2 = chan.lock(file_handle2, lock2).result()
actual_status = str(lock_res2.status)
print "Unexpected success. Unlocked requested byte ranges."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 19 Passed"
def test_20_tree_disconnect_shared_unlock(self):
try:
print "\n------------------TREE_DISCONNECT_TEST20-------------------------------------"
print "TC 20 - Lock file TREE_DISCONNECT_TEST20 with SMB2_LOCKFLAG_SHARED_LOCK and do the tree disconnect.Send unlock request on same byte ranges."
expected_status = "STATUS_FILE_CLOSED"
locks1 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_SHARED_LOCK)]
locks2 = [(8, 8, pike.smb2.SMB2_LOCKFLAG_UN_LOCK)]
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST20 for testing"
file_handle = chan.create(tree, "TREE_DISCONNECT_TEST20", disposition=pike.smb2.FILE_OPEN_IF).result()
print "File created."
print "Sending Lock request with SMB2_LOCKFLAG_SHARED_LOCK..."
lock_res1 = chan.lock(file_handle, locks1).result()
print "File locked with shared lock on requested byte ranges."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Sending Lock request with SMB2_LOCKFLAG_UN_LOCK flag..."
lock_res2 = chan.lock(file_handle, locks2).result()
actual_status = str(lock_res2.status)
print "Unexpected success. Unlocked requested byte ranges."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 20 Passed"
def test_21_tree_disconnect_oplock_none(self):
try:
print "\n------------------TREE_DISCONNECT_TEST21-------------------------------------"
print "TC 21 - Create file TREE_DISCONNECT_TEST21 with SMB2_OPLOCK_LEVEL_NONE and send tree disconnect request.With old session and new treeid try to send create open request with EXCLUSIVE oplock"
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST21 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST21",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_NONE).result()
print "File created with SMB2_OPLOCK_LEVEL_NONE."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successfull."
print "Open file TREE_DISCONNECT_TEST21 for testing oplock level with EXCLUSIVE."
requested_oplock = pike.smb2.SMB2_OPLOCK_LEVEL_EXCLUSIVE
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST21",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=requested_oplock).result()
if requested_oplock == file_handle2.oplock_level:
actual_status = "STATUS_SUCCESS"
print "File opened with SMB2_OPLOCK_LEVEL_EXCLUSIVE."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 21 Passed"
def test_22_tree_disconnect_oplock_2(self):
try:
print "\n------------------TREE_DISCONNECT_TEST22-------------------------------------"
print "TC 22 - Create file TREE_DISCONNECT_TEST22 with SMB2_OPLOCK_LEVEL_II and send tree disconnect request. With old session_id and new tree_id try to send create open request with EXCLUSIVE oplock"
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST22 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST22",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_II).result()
print "File created with SMB2_OPLOCK_LEVEL_II."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successfull."
print "Open file TREE_DISCONNECT_TEST22 for testing oplock level with EXCLUSIVE"
requested_oplock = pike.smb2.SMB2_OPLOCK_LEVEL_EXCLUSIVE
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST22",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=requested_oplock).result()
if requested_oplock == file_handle2.oplock_level:
actual_status = "STATUS_SUCCESS"
print "File opened with SMB2_OPLOCK_LEVEL_EXCLUSIVE."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 22 Passed"
def test_23_tree_disconnect_oplock_exclusive(self):
try:
print "\n------------------TREE_DISCONNECT_TEST23-------------------------------------"
print "TC 23 - Create file TREE_DISCONNECT_TEST23 with SMB2_OPLOCK_LEVEL_EXCLUSIVE and send tree disconnect request. With old session_id and new tree_id try to send create open request with EXCLUSIVE oplock."
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST23 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST23",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_EXCLUSIVE).result()
print "File created with SMB2_OPLOCK_LEVEL_EXCLUSIVE."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successfull."
print "Open file TREE_DISCONNECT_TEST23 for testing oplock level with EXCLUSIVE."
requested_oplock = pike.smb2.SMB2_OPLOCK_LEVEL_EXCLUSIVE
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST23",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=requested_oplock).result()
if requested_oplock == file_handle2.oplock_level:
actual_status = "STATUS_SUCCESS"
print "File opened with SMB2_OPLOCK_LEVEL_EXCLUSIVE."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 23 Passed"
def test_24_tree_disconnect_oplock_batch(self):
try:
print "\n-----------------TREE_DISCONNECT_TEST24-------------------------------------"
print "TC 24 - Create file TREE_DISCONNECT_TEST24 with SMB2_OPLOCK_LEVEL_BATCH and send tree disconnect request. With old session_id and new tree_id try to send create open request with EXCLUSIVE oplock."
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST24 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST24",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_BATCH).result()
print "File created with SMB2_OPLOCK_LEVEL_BATCH."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successfull."
print "Open file TREE_DISCONNECT_TEST24 for testing oplock level with EXCLUSIVE."
requested_oplock = pike.smb2.SMB2_OPLOCK_LEVEL_EXCLUSIVE
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST24",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=requested_oplock).result()
if requested_oplock == file_handle2.oplock_level:
actual_status = "STATUS_SUCCESS"
print "File opened with SMB2_OPLOCK_LEVEL_EXCLUSIVE."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 24 Passed"
def test_25_tree_disconnect_rwh_lease(self):
try:
print "\n------------------TREE_DISCONNECT_TEST25-------------------------------------"
print "TC 25 - Create file TREE_DISCONNECT_TEST25 with SMB2_OPLOCK_LEVEL_LEASE and lease_state=RWH, send tree disconnect request.With old session_id and new tree_id try to send create open request"
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successful."
print "Create a file TREE_DISCONNECT_TEST25 for testing"
self.lease1 = array.array('B', map(random.randint, [0] * 16, [255] * 16))
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST25",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=self.lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
print "File created with SMB2_OPLOCK_LEVEL_LEASE and RWH lease state."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successful."
print "Open file TREE_DISCONNECT_TEST25 for testing oplock level with EXCLUSIVE."
requested_lease_state = pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST25",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=self.lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
if requested_lease_state == file_handle2.lease.lease_state:
actual_status = "STATUS_SUCCESS"
print "File opened with RWH lease."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 25 Passed"
def test_26_tree_disconnect_rw_lease(self):
try:
print "\n------------------TREE_DISCONNECT_TEST26-------------------------------------"
print "TC 26 - Create file TREE_DISCONNECT_TEST26 with SMB2_OPLOCK_LEVEL_LEASE and lease_state=RW, send tree disconnect request.With old session_id and new tree_id try to send create open request"
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successful."
print "Create a file TREE_DISCONNECT_TEST26 for testing"
self.lease1 = array.array('B', map(random.randint, [0] * 16, [255] * 16))
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST26",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=self.lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING).result()
print "File created with SMB2_OPLOCK_LEVEL_LEASE and RW lease state."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successful."
print "Open file TREE_DISCONNECT_TEST26 for testing oplock level with EXCLUSIVE."
requested_lease_state = pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST26",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=self.lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
if requested_lease_state == file_handle2.lease.lease_state:
actual_status = "STATUS_SUCCESS"
print "File opened with RWH lease."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 26 Passed"
def test_27_tree_disconnect_rh_lease(self):
try:
print "\n------------------TREE_DISCONNECT_TEST27-------------------------------------"
print "TC 27 - Create file TREE_DISCONNECT_TEST27 with SMB2_OPLOCK_LEVEL_LEASE and lease_state=RH, send tree disconnect request.With old session_id and new tree_id try to send create open request"
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successful."
print "Create a file TREE_DISCONNECT_TEST27 for testing"
lease1 = array.array('B', map(random.randint, [0] * 16, [255] * 16))
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST27",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
print "File created with SMB2_OPLOCK_LEVEL_LEASE and RH lease state."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successful."
print "Open file TREE_DISCONNECT_TEST27 for testing oplock level with EXCLUSIVE."
requested_lease_state = pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST27",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
if requested_lease_state == file_handle2.lease.lease_state:
actual_status = "STATUS_SUCCESS"
print "File opened with RWH lease."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 27 Passed"
def test_28_tree_disconnect_lease_none(self):
try:
print "\n------------------TREE_DISCONNECT_TEST28-------------------------------------"
print "TC 28 - Create file TREE_DISCONNECT_TEST28 with SMB2_OPLOCK_LEVEL_LEASE and lease_state=NONE, send tree disconnect request.With old session_id and new tree_id try to send create open request"
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successful."
print "Create a file TREE_DISCONNECT_TEST28 for testing"
lease1 = array.array('B', map(random.randint, [0] * 16, [255] * 16))
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST28",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=lease1,
lease_state=pike.smb2.SMB2_LEASE_NONE).result()
print "File created with SMB2_OPLOCK_LEVEL_LEASE and lease state NONE."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successful."
print "Open file TREE_DISCONNECT_TEST28 for testing oplock level with EXCLUSIVE."
requested_lease_state = pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING
file_handle2 = chan.create(tree2, "TREE_DISCONNECT_TEST28",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
if requested_lease_state == file_handle2.lease.lease_state:
actual_status = "STATUS_SUCCESS"
print "File opened with RWH lease."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 28 Passed"
def test_29_lease_none_same_treeid(self):
try:
print "\n------------------TREE_DISCONNECT_TEST29-------------------------------------"
print "TC 29 - Create file TREE_DISCONNECT_TEST29 with SMB2_OPLOCK_LEVEL_LEASE and lease_state=NONE, send tree disconnect request. With old session_id and old tree_id try to send create open request"
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST29 for testing"
lease1 = array.array('B', map(random.randint, [0] * 16, [255] * 16))
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST29",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=lease1,
lease_state=pike.smb2.SMB2_LEASE_NONE).result()
print "File created with SMB2_OPLOCK_LEVEL_LEASE and lease state NONE."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Open file TREE_DISCONNECT_TEST29 for testing oplock level with EXCLUSIVE."
requested_lease_state = pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING
file_handle2 = chan.create(tree, "TREE_DISCONNECT_TEST29",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
if requested_lease_state == file_handle2.lease.lease_state:
actual_status = "STATUS_SUCCESS"
print "Unexpected success. File opened with RWH lease."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 29 Passed"
def test_30_tree_disconnect_flush(self):
try:
print "\n------------------TREE_DISCONNECT_TEST30-------------------------------------"
print "TC 30 - Create file TREE_DISCONNECT_TEST30 and do the tree disconnect. Send flush request on the TREE_DISCONNECT_TEST30 file with same session_id."
buffer = "testing 123"
expected_status = "STATUS_FILE_CLOSED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successful."
print "Create a file TREE_DISCONNECT_TEST30 for testing"
file_handle = chan.create(tree, "TREE_DISCONNECT_TEST30", disposition=pike.smb2.FILE_OPEN_IF).result()
print "File created."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Sending flush request..."
flush_packet = conv_obj.flush(chan, tree, file_id=file_handle.file_id)
res1 = conv_obj.transceive(chan, flush_packet)
actual_status = str(res1[0].status)
print "Unexpected success. Flush request successful."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 30 Passed"
def test_31_tree_disconnect_echo(self):
try:
print "\n------------------TREE_DISCONNECT_TEST31-------------------------------------"
print "TC 31 - Create file TREE_DISCONNECT_TEST31 and do the tree disconnect. Send echo request."
buffer = "testing 123"
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successful."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Sending echo request..."
echo_packet = conv_obj.echo(chan)
res2 = conv_obj.transceive(chan,echo_packet)
actual_status = str(res2[0].status)
print "Echo request successfully processed."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 31 Passed"
def test_32_tree_disconnect_query(self):
try:
print "\n------------------TREE_DISCONNECT_TEST32-------------------------------------"
print "TC 32 - Testing Tree disconnect followed by query directory request."
expected_status = "STATUS_FILE_CLOSED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Tree connection successful."
print "Create a file TREE_DISCONNECT_TEST32 for testing"
dir_handle = chan.create(tree, "TREE_DISCONNECT_TEST32", disposition=pike.smb2.FILE_OPEN_IF, options=pike.smb2.FILE_DIRECTORY_FILE).result()
print "File created."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Querying directory information on TREE_DISCONNECT_TEST32."
names = map(lambda info: info.file_name, chan.query_directory(dir_handle))
if self.assertIn('TREE_DISCONNECT_TEST32', names):
actual_status = "STATUS_SUCCESS"
print "Unexpected success. Querying directory information on TREE_DISCONNECT_TEST32 done."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 32 Passed"
def test_33_difftree_durable_open(self):
try:
print "\n------------------TREE_DISCONNECT_TEST33-------------------------------------"
print "TC 33 - Create file TREE_DISCONNECT_TEST33 with durable set and do the tree disconnect.Send create open request on TREE_DISCONNECT_TEST33 file with same session and different treeid ."
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successful."
print "Create a file TREE_DISCONNECT_TEST33 for testing"
lease1 = array.array('B', map(random.randint, [0] * 16, [255] * 16))
conv_obj = utils.Convenience()
create_tmp1, create_resp1 = conv_obj.create(chan, tree, 'TREE_DISCONNECT_TEST33',
access=pike.smb2.GENERIC_READ,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
attributes=pike.smb2.FILE_ATTRIBUTE_TEMPORARY,
options=pike.smb2.FILE_NO_EA_KNOWLEDGE,
name_offset=120,
name_length=26,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_BATCH,
durable=True)
file_handle1 = create_tmp1.result()
print "File created."
create_result1 = create_resp1[0].result()
print "Verifying durable handle..."
assert "DurableHandleResponse" in str(create_result1[0].children), "Durable handle response not found in Create response."
print "Durable handle validation has passed."
print "Sending TREE_DISCONNECT request..."
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "TREE_DISCONNECT successfull."
print "Sending create Open request on the file TREE_DISCONNECT_TEST33..."
print "Creating new treeid"
tree2 = chan.tree_connect(self.share)
print "New tree connect successfull"
print "Sending create Open request with different treeid on the file TREE_DISCONNECT_TEST33..."
create_tmp2, create_resp2 = conv_obj.create(chan, tree2,
'TREE_DISCONNECT_TEST33',
access=pike.smb2.FILE_WRITE_DATA,
attributes=pike.smb2.FILE_ATTRIBUTE_TEMPORARY,
share=pike.smb2.FILE_SHARE_WRITE,
disposition=pike.smb2.FILE_OPEN,
name_offset=120,
name_length=26,
options=pike.smb2.FILE_NO_EA_KNOWLEDGE)
file_handle2 = create_tmp2.result()
print "File TREE_DISCONNECT_TEST33 opened."
create_result2 = create_resp2[0].result()
actual_status = str(create_result2.status)
print "File open successful."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 33 Passed"
def test_34_oplock_none_same_treeid(self):
try:
print "\n------------------TREE_DISCONNECT_TEST34-------------------------------------"
print "TC 34 - Create file TREE_DISCONNECT_TEST34 with SMB2_OPLOCK_LEVEL_NONE and send tree disconnect request.With old session and old treeid try to send create open request with EXCLUSIVE oplock"
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST34 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST34",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_NONE).result()
print "File created with SMB2_OPLOCK_LEVEL_NONE."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Open file TREE_DISCONNECT_TEST34 for testing oplock level with EXCLUSIVE."
requested_oplock = pike.smb2.SMB2_OPLOCK_LEVEL_EXCLUSIVE
file_handle2 = chan.create(tree, "TREE_DISCONNECT_TEST34",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=requested_oplock).result()
if requested_oplock == file_handle2.oplock_level:
actual_status = "STATUS_SUCCESS"
print "Unexpected success. File opened with SMB2_OPLOCK_LEVEL_EXCLUSIVE."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 34 Passed"
def test_35_oplock_2_same_treeid(self):
try:
print "\n------------------TREE_DISCONNECT_TEST35-------------------------------------"
print "TC 35 - Create file TREE_DISCONNECT_TEST35 with SMB2_OPLOCK_LEVEL_II and send tree disconnect request. With old session_id and old tree_id try to send create open request with EXCLUSIVE oplock"
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST35 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST35",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_II).result()
print "File created with SMB2_OPLOCK_LEVEL_II."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Open file TREE_DISCONNECT_TEST35 for testing oplock level with EXCLUSIVE"
requested_oplock = pike.smb2.SMB2_OPLOCK_LEVEL_EXCLUSIVE
file_handle2 = chan.create(tree, "TREE_DISCONNECT_TEST35",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=requested_oplock).result()
if requested_oplock == file_handle2.oplock_level:
actual_status = "STATUS_SUCCESS"
print "Unexpected success. File opened with SMB2_OPLOCK_LEVEL_EXCLUSIVE."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 35 Passed"
def test_36_oplock_exclusive_same_treeid(self):
try:
print "\n------------------TREE_DISCONNECT_TEST36-------------------------------------"
print "TC 36 - Create file TREE_DISCONNECT_TEST36 with SMB2_OPLOCK_LEVEL_EXCLUSIVE and send tree disconnect request. With old session_id and old tree_id try to send create open request with EXCLUSIVE oplock."
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST36 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST36",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_EXCLUSIVE).result()
print "File created with SMB2_OPLOCK_LEVEL_EXCLUSIVE."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Open file TREE_DISCONNECT_TEST36 for testing oplock level with EXCLUSIVE."
requested_oplock = pike.smb2.SMB2_OPLOCK_LEVEL_EXCLUSIVE
file_handle2 = chan.create(tree, "TREE_DISCONNECT_TEST36",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=requested_oplock).result()
if requested_oplock == file_handle2.oplock_level:
actual_status = "STATUS_SUCCESS"
print "Unexpected success.File opened with SMB2_OPLOCK_LEVEL_EXCLUSIVE."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 36 Passed"
def test_37_oplock_batch_same_treeid(self):
try:
print "\n-----------------TREE_DISCONNECT_TEST37-------------------------------------"
print "TC 37 - Create file TREE_DISCONNECT_TEST37 with SMB2_OPLOCK_LEVEL_BATCH and send tree disconnect request. With old session_id and old tree_id try to send create open request with EXCLUSIVE oplock."
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST37 for testing"
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST37",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_BATCH).result()
print "File created with SMB2_OPLOCK_LEVEL_BATCH."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Creating second tree connect..."
tree2 = chan.tree_connect(self.share)
print "Second tree connect successfull."
print "Open file TREE_DISCONNECT_TEST37 for testing oplock level with EXCLUSIVE."
requested_oplock = pike.smb2.SMB2_OPLOCK_LEVEL_EXCLUSIVE
file_handle2 = chan.create(tree, "TREE_DISCONNECT_TEST37",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=requested_oplock).result()
if requested_oplock == file_handle2.oplock_level:
actual_status = "STATUS_SUCCESS"
print "Unexpected success. File opened with SMB2_OPLOCK_LEVEL_EXCLUSIVE."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 37 Passed"
def test_38_rwh_lease_same_treeid(self):
try:
print "\n------------------TREE_DISCONNECT_TEST38-------------------------------------"
print "TC 38 - Create file TREE_DISCONNECT_TEST38 with SMB2_OPLOCK_LEVEL_LEASE and lease_state=RWH, send tree disconnect request.With old session_id and old tree_id try to send create open request"
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST25 for testing"
self.lease1 = array.array('B', map(random.randint, [0] * 16, [255] * 16))
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST38",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=self.lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
print "File created with SMB2_OPLOCK_LEVEL_LEASE and RWH lease state."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Open file TREE_DISCONNECT_TEST38 for testing oplock level with EXCLUSIVE."
requested_lease_state = pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING
file_handle2 = chan.create(tree, "TREE_DISCONNECT_TEST38",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=self.lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
if requested_lease_state == file_handle2.lease.lease_state:
actual_status = "STATUS_SUCCESS"
print "Unexpected success. File opened with RWH lease."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 38 Passed"
def test_39_rw_lease_same_treeid(self):
try:
print "\n------------------TREE_DISCONNECT_TEST39-------------------------------------"
print "TC 39 - Create file TREE_DISCONNECT_TEST39 with SMB2_OPLOCK_LEVEL_LEASE and lease_state=RW, send tree disconnect request.With old session_id and old tree_id try to send create open request"
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successful."
print "Create a file TREE_DISCONNECT_TEST39 for testing"
self.lease1 = array.array('B', map(random.randint, [0] * 16, [255] * 16))
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST39",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=self.lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING).result()
print "File created with SMB2_OPLOCK_LEVEL_LEASE and RW lease state."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Open file TREE_DISCONNECT_TEST39 for testing oplock level with EXCLUSIVE."
requested_lease_state = pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING
file_handle2 = chan.create(tree, "TREE_DISCONNECT_TEST39",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=self.lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
if requested_lease_state == file_handle2.lease.lease_state:
actual_status = "STATUS_SUCCESS"
print "Unexpected success. File opened with RWH lease."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 39 Passed"
def test_40_rh_lease_same_treeid(self):
try:
print "\n------------------TREE_DISCONNECT_TEST40-------------------------------------"
print "TC 40 - Create file TREE_DISCONNECT_TEST40 with SMB2_OPLOCK_LEVEL_LEASE and lease_state=RH, send tree disconnect request. With old session_id and old tree_id try to send create open request"
expected_status = "STATUS_NETWORK_NAME_DELETED"
print "Expected status: ", expected_status
print "Creating session and tree connect..."
chan, tree = self.tree_connect()
print "Session setup and Tree connect successfull."
print "Create a file TREE_DISCONNECT_TEST40 for testing"
lease1 = array.array('B', map(random.randint, [0] * 16, [255] * 16))
file_handle1 = chan.create(tree, "TREE_DISCONNECT_TEST40",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN_IF,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
print "File created with SMB2_OPLOCK_LEVEL_LEASE and RH lease state."
print "Sending tree_disconnect request..."
conv_obj = utils.Convenience()
tree_disconnect_req = conv_obj.tree_disconnect(chan,tree)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
print "Open file TREE_DISCONNECT_TEST40 for testing oplock level with EXCLUSIVE."
requested_lease_state = pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING
file_handle2 = chan.create(tree, "TREE_DISCONNECT_TEST40",
access=pike.smb2.FILE_READ_DATA | pike.smb2.FILE_WRITE_DATA,
share=pike.smb2.FILE_SHARE_READ | pike.smb2.FILE_SHARE_WRITE | pike.smb2.FILE_SHARE_DELETE,
disposition=pike.smb2.FILE_OPEN,
oplock_level=pike.smb2.SMB2_OPLOCK_LEVEL_LEASE,
lease_key=lease1,
lease_state=pike.smb2.SMB2_LEASE_READ_CACHING | pike.smb2.SMB2_LEASE_WRITE_CACHING | pike.smb2.SMB2_LEASE_HANDLE_CACHING).result()
if requested_lease_state == file_handle2.lease.lease_state:
actual_status = "STATUS_SUCCESS"
print "Unexpected success. File opened with RWH lease."
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 40 Passed"
def test_41_multiple_tree_connect(self):
try:
print "\n------------------TREE_DISCONNECT_TEST41-------------------------------------"
print "TC 41 - Send multiple tree connect requests to connect to share i.e 2^16 tree connects and then send tree disconnect request for all the tree connects."
expected_status = "STATUS_SUCCESS"
print "Expected status: ", expected_status
print "Send negotiate request"
conn = pike.model.Client().connect(self.server, self.port).negotiate()
print "Negotiate request is successfull"
print "Send session setup request"
chan = conn.session_setup(self.creds)
print "Session setup successfull"
conv_obj = utils.Convenience()
print "Send 2^16 tree connect requests to access to a particular share on the server"
tree_connect_id = []
for i in range(0,65535):
print "Tree Connect number :", i
tree = chan.tree_connect(self.share)
print "The tree id is :",tree.tree_id
tree_connect_id.append(tree)
for t in tree_connect_id:
print "Disconnecting tree id:",t.tree_id
tree_disconnect_req = conv_obj.tree_disconnect(chan,t)
res = conv_obj.transceive(chan, tree_disconnect_req)
print "Tree Disconnect successfull."
actual_status = str(res[0].status)
except Exception as e:
actual_status = str(e)
print "Actual status: ", actual_status
self.assertIn(expected_status, actual_status)
print "TC 41 Passed"
| 63.20274
| 220
| 0.605455
| 10,240
| 92,276
| 5.183496
| 0.032324
| 0.125812
| 0.05132
| 0.033309
| 0.898133
| 0.888883
| 0.865672
| 0.847736
| 0.834341
| 0.812506
| 0
| 0.02427
| 0.303427
| 92,276
| 1,459
| 221
| 63.246059
| 0.801515
| 0.002146
| 0
| 0.735
| 0
| 0.018571
| 0.308912
| 0.096258
| 0
| 0
| 0
| 0
| 0.032143
| 0
| null | null | 0.031429
| 0.005
| null | null | 0.407143
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
c728fb02ed31cf77d3e3996bb695d1fc61652234
| 201
|
py
|
Python
|
typogrify/tests/__init__.py
|
dryan/django-typogrify
|
fb47a3bbbf7adfa524879cb3e61517f8c3cb73dc
|
[
"BSD-3-Clause"
] | 31
|
2015-01-08T11:55:56.000Z
|
2021-09-07T08:24:52.000Z
|
typogrify/tests/__init__.py
|
dryan/django-typogrify
|
fb47a3bbbf7adfa524879cb3e61517f8c3cb73dc
|
[
"BSD-3-Clause"
] | 4
|
2016-02-08T17:53:59.000Z
|
2020-06-17T23:59:06.000Z
|
typogrify/tests/__init__.py
|
dryan/django-typogrify
|
fb47a3bbbf7adfa524879cb3e61517f8c3cb73dc
|
[
"BSD-3-Clause"
] | 14
|
2015-08-29T01:21:10.000Z
|
2022-03-20T20:37:34.000Z
|
# -*- coding: utf-8 -*-
from typogrify.tests.test_fuzzydate import *
from typogrify.tests.test_tags import *
from typogrify.tests.test_titlecase import *
__all__ = ['TestFuzzydate', 'TestTitlecase']
| 25.125
| 44
| 0.756219
| 24
| 201
| 6.041667
| 0.583333
| 0.268966
| 0.372414
| 0.455172
| 0.386207
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005618
| 0.114428
| 201
| 7
| 45
| 28.714286
| 0.808989
| 0.104478
| 0
| 0
| 0
| 0
| 0.146067
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c739b8913bfaa58489110c367aa4a7070f2a9177
| 7,451
|
py
|
Python
|
tests/src/Diksha_TPD/TPD_Enrollment_completion/check_completion_time_periods.py
|
sreenivas8084/cQube
|
3352a13f41679d707979e287d1880f0723b27510
|
[
"MIT"
] | null | null | null |
tests/src/Diksha_TPD/TPD_Enrollment_completion/check_completion_time_periods.py
|
sreenivas8084/cQube
|
3352a13f41679d707979e287d1880f0723b27510
|
[
"MIT"
] | 2
|
2022-02-01T00:55:12.000Z
|
2022-03-29T22:29:09.000Z
|
tests/src/Diksha_TPD/TPD_Enrollment_completion/check_completion_time_periods.py
|
SreenivasNimmagadda/cQube
|
3352a13f41679d707979e287d1880f0723b27510
|
[
"MIT"
] | null | null | null |
import os
import time
from selenium.webdriver.support.select import Select
from Data.parameters import Data
from filenames import file_extention
from get_dir import pwd
from reuse_func import GetData
class completion_time_periods():
def __init__(self,driver):
# Selenium WebDriver instance shared by every report check in this class.
self.driver = driver
def test_completion_overall(self):
# Drive the TPD "Completion" report with the "Overall" time period, download
# the CSV for each collection name in the dropdown, and count failures.
# Returns (collections_checked, download_failures).
# NOTE(review): indentation was lost in this capture; block nesting below
# (what falls inside the no-data else branch and the for loop) should be
# confirmed against the original file.
self.data = GetData()
self.p = pwd()
self.msg = file_extention()
count = 0
# Navigate into the report page.
self.driver.find_element_by_xpath(Data.hyper_link).click()
self.data.page_loading(self.driver)
# Select the "Completion" course type, then the "Overall" time period.
course_type = Select(self.driver.find_element_by_id(Data.coursetype))
course_type.select_by_visible_text(' Completion ')
self.data.page_loading(self.driver)
timeseries = Select(self.driver.find_element_by_name(Data.timeperiods))
timeseries.select_by_visible_text(' Overall ')
self.data.page_loading(self.driver)
if self.msg.no_data_available() in self.driver.page_source:
print('No Data Available for Over All')
else:
# Trigger the CSV download; fixed sleep waits for the file to land.
self.driver.find_element_by_id(Data.Download).click()
time.sleep(5)
times = (self.driver.find_element_by_name(Data.timeperiods).text).strip()
# Expected download path is derived from today's date.
self.filename = self.p.get_download_dir() + '/'+'enrollment_completion_completion_all_district_overall_'+self.data.get_current_date()+'.csv'
print(self.filename)
self.data.page_loading(self.driver)
# Walk every collection option (skipping first and last entries).
collnames = Select(self.driver.find_element_by_id(Data.coll_names))
counter = len(collnames.options)-1
for i in range(1,len(collnames.options)-1):
collnames.select_by_index(i)
self.data.page_loading(self.driver)
# Missing CSV counts as one failure.
if os.path.isfile(self.filename) != True:
print('Completion Over all csv file is not downloaded ')
count = count + 1
self.data.page_loading(self.driver)
# Clean up the downloaded file (raises if it never appeared).
os.remove(self.filename)
return counter,count
def test_completion_last_day(self):
# Drive the TPD "Completion" report with the "Last Day" time period, download
# the CSV for each collection name in the dropdown, and count failures.
# Unlike test_completion_overall, the (counter, count) return is commented out.
# NOTE(review): indentation was lost in this capture; block nesting below
# should be confirmed against the original file.
self.data = GetData()
self.p = pwd()
self.msg = file_extention()
count = 0
# Navigate into the report page.
self.driver.find_element_by_xpath(Data.hyper_link).click()
self.data.page_loading(self.driver)
# Select the "Completion" course type, then the "Last Day" time period.
course_type = Select(self.driver.find_element_by_id(Data.coursetype))
course_type.select_by_visible_text(' Completion ')
self.data.page_loading(self.driver)
timeseries = Select(self.driver.find_element_by_name(Data.timeperiods))
timeseries.select_by_visible_text(' Last Day ')
self.data.page_loading(self.driver)
if self.msg.no_data_available() in self.driver.page_source:
print('No Data Available for last day')
else:
# Trigger the CSV download; fixed sleep waits for the file to land.
self.driver.find_element_by_id(Data.Download).click()
time.sleep(3)
times = (self.driver.find_element_by_name(Data.timeperiods).text).strip()
ctype = (self.driver.find_element_by_id(Data.coursetype).text).strip()
# Expected download path embeds the course type and today's date.
self.filename = self.p.get_download_dir() + '/'+'tpd_'+ctype+'_all_district_last_day'+'_'+self.data.get_current_date()+'.csv'
print(self.filename)
self.data.page_loading(self.driver)
# Walk every collection option (skipping first and last entries).
collnames = Select(self.driver.find_element_by_id(Data.coll_names))
counter = len(collnames.options)-1
for i in range(1,len(collnames.options)-1):
collnames.select_by_index(i)
self.data.page_loading(self.driver)
# Missing CSV counts as one failure.
if os.path.isfile(self.filename) != True:
print('Completion last day csv file is not downloaded ')
count = count + 1
self.data.page_loading(self.driver)
# Clean up the downloaded file (raises if it never appeared).
os.remove(self.filename)
# return counter,count
def test_completion_last7_days(self):
    """Download the Completion report for the ' Last 7 Days ' time period
    and verify the CSV file appears in the download directory.

    Same flow as test_completion_last_day, with the 7-day period selected
    and a slightly longer download wait.
    """
    self.data = GetData()
    self.p = pwd()
    self.msg = file_extention()
    # Number of missing-download observations.
    count = 0
    self.driver.find_element_by_xpath(Data.hyper_link).click()
    self.data.page_loading(self.driver)
    course_type = Select(self.driver.find_element_by_id(Data.coursetype))
    # Option labels carry surrounding spaces in the page markup.
    course_type.select_by_visible_text(' Completion ')
    self.data.page_loading(self.driver)
    timeseries = Select(self.driver.find_element_by_name(Data.timeperiods))
    timeseries.select_by_visible_text(' Last 7 Days ')
    self.data.page_loading(self.driver)
    if self.msg.no_data_available() in self.driver.page_source:
        print('No Data Available for last 7 days')
    else:
        self.driver.find_element_by_id(Data.Download).click()
        time.sleep(4)  # give the browser time to finish the download
        # times is captured but unused below.
        times = (self.driver.find_element_by_name(Data.timeperiods).text).strip()
        ctype = (self.driver.find_element_by_id(Data.coursetype).text).strip()
        # Expected name: tpd_<coursetype>_all_district_last_7_days_<date>.csv
        self.filename = self.p.get_download_dir() + '/'+'tpd_'+ctype+'_all_district_last_7_days'+'_'+self.data.get_current_date()+'.csv'
        print(self.filename)
        self.data.page_loading(self.driver)
        collnames = Select(self.driver.find_element_by_id(Data.coll_names))
        # Total selectable collections (index 0 is a placeholder option).
        counter = len(collnames.options) - 1
        for i in range(1, len(collnames.options) - 1):
            collnames.select_by_index(i)
            self.data.page_loading(self.driver)
        if os.path.isfile(self.filename) != True:
            print('Completion last 7 days csv file is not downloaded ')
            count = count + 1
        self.data.page_loading(self.driver)
        os.remove(self.filename)
        # return counter, count
def test_completion_last30_days(self):
    """Download the Completion report for the ' Last 30 Days ' time period
    and verify the CSV file appears in the download directory.

    Same flow as test_completion_last_day / test_completion_last7_days,
    with the 30-day period selected.
    """
    self.data = GetData()
    self.p = pwd()
    self.msg = file_extention()
    # Number of missing-download observations.
    count = 0
    self.driver.find_element_by_xpath(Data.hyper_link).click()
    self.data.page_loading(self.driver)
    course_type = Select(self.driver.find_element_by_id(Data.coursetype))
    # Option labels carry surrounding spaces in the page markup.
    course_type.select_by_visible_text(' Completion ')
    self.data.page_loading(self.driver)
    timeseries = Select(self.driver.find_element_by_name(Data.timeperiods))
    timeseries.select_by_visible_text(' Last 30 Days ')
    self.data.page_loading(self.driver)
    if self.msg.no_data_available() in self.driver.page_source:
        print('No Data Available for last 30 days')
    else:
        self.driver.find_element_by_id(Data.Download).click()
        time.sleep(3)  # give the browser time to finish the download
        # times is captured but unused below.
        times = (self.driver.find_element_by_name(Data.timeperiods).text).strip()
        ctype = (self.driver.find_element_by_id(Data.coursetype).text).strip()
        # FIX: the original built '..._last_30_days_' + '_' + date, producing
        # a double underscore that never matches the downloaded file name.
        # Match the sibling tests' pattern: ..._last_30_days_<date>.csv
        self.filename = self.p.get_download_dir() + '/'+'tpd_'+ctype+'_all_district_last_30_days'+'_'+self.data.get_current_date()+'.csv'
        print(self.filename)
        self.data.page_loading(self.driver)
        collnames = Select(self.driver.find_element_by_id(Data.coll_names))
        # Total selectable collections (index 0 is a placeholder option).
        counter = len(collnames.options) - 1
        for i in range(1, len(collnames.options) - 1):
            collnames.select_by_index(i)
            self.data.page_loading(self.driver)
        if os.path.isfile(self.filename) != True:
            print('Completion last 30 days csv file is not downloaded ')
            count = count + 1
        self.data.page_loading(self.driver)
        os.remove(self.filename)
        # return counter, count
| 48.383117
| 152
| 0.64233
| 957
| 7,451
| 4.758621
| 0.104493
| 0.125165
| 0.083004
| 0.124506
| 0.921168
| 0.919851
| 0.919851
| 0.919851
| 0.919851
| 0.913043
| 0
| 0.006976
| 0.249631
| 7,451
| 153
| 153
| 48.699346
| 0.807548
| 0.008589
| 0
| 0.767606
| 0
| 0
| 0.078423
| 0.017337
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035211
| false
| 0
| 0.049296
| 0
| 0.098592
| 0.084507
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c793ff712f66b698b5f31c7435ef20be58383faa
| 3,608
|
py
|
Python
|
tests/test_utils.py
|
itto-ki/Atarg
|
54b01bfc5e30ca901941bc553d2d59eefbb3701f
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
itto-ki/Atarg
|
54b01bfc5e30ca901941bc553d2d59eefbb3701f
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
itto-ki/Atarg
|
54b01bfc5e30ca901941bc553d2d59eefbb3701f
|
[
"MIT"
] | null | null | null |
import unittest
from atarg import utils
class HTMLParserTest(unittest.TestCase):
    """Exercise atarg.utils against AtCoder task pages and URL helpers."""

    def test_fetch_inputs_and_outputs(self):
        # (url, contest name, contest number, expected samples);
        # upper- and lower-case contest names must behave identically.
        cases = [
            ('https://beta.atcoder.jp/contests/abc020/tasks/abc020_a',
             'ABC', 20, ['1', 'ABC', '2', 'chokudai']),
            ('https://beta.atcoder.jp/contests/abc020/tasks/abc020_a',
             'abc', 20, ['1', 'ABC', '2', 'chokudai']),
            ('https://beta.atcoder.jp/contests/abc001/tasks/abc001_1',
             'ABC', 1, ['15\r\n10', '5', '0\r\n0', '0', '5\r\n20', '-15']),
            ('https://beta.atcoder.jp/contests/arc057/tasks/arc057_a',
             'ARC', 57,
             ['1000 300', '4', '6 2', '25', '567876543 0', '1999432123457']),
            ('https://beta.atcoder.jp/contests/arc057/tasks/arc057_a',
             'arc', 57,
             ['1000 300', '4', '6 2', '25', '567876543 0', '1999432123457']),
            ('https://beta.atcoder.jp/contests/arc058/tasks/arc058_a',
             'ARC', 58,
             ['1000 8\r\n1 3 4 5 6 7 8 9', '2000', '9999 1\r\n0', '9999']),
            ('https://beta.atcoder.jp/contests/agc001/tasks/agc001_a',
             'AGC', 1,
             ['2\r\n1 3 1 2', '3',
              '5\r\n100 1 2 3 14 15 58 58 58 29', '135']),
            ('https://beta.atcoder.jp/contests/agc001/tasks/agc001_a',
             'agc', 1,
             ['2\r\n1 3 1 2', '3',
              '5\r\n100 1 2 3 14 15 58 58 58 29', '135']),
        ]
        for url, contest, number, expected in cases:
            self.assertEqual(
                utils.fetch_inputs_and_outputs(url, contest, number),
                expected)

    def test_translate_task(self):
        # Early ABC/ARC rounds use numeric task ids, later ones letters.
        cases = [
            ('ABC', 19, 'A', '1'),
            ('ABC', 20, 'A', 'a'),
            ('ARC', 34, 'A', '1'),
            ('ARC', 35, 'A', 'a'),
            ('AGC', 1, 'A', 'a'),
        ]
        for contest, number, task, expected in cases:
            self.assertEqual(
                utils.translate_task(contest, number, task), expected)

    def test_compose_task_url(self):
        # Contest name and task letter are case-insensitive.
        expected = 'https://beta.atcoder.jp/contests/abc020/tasks/abc020_a'
        for contest in ('ABC', 'abc'):
            for task in ('A', 'a'):
                self.assertEqual(
                    utils.compose_task_url(contest, 20, task), expected)

    def test_compose_submit_url(self):
        # Contest name is case-insensitive.
        expected = 'https://atcoder.jp/contests/abc020/submit'
        for contest in ('ABC', 'abc'):
            self.assertEqual(utils.compose_submit_url(contest, 20), expected)
# Allow running this test module directly: python tests/test_utils.py
if __name__ == '__main__':
    unittest.main()
| 43.46988
| 79
| 0.522173
| 423
| 3,608
| 4.281324
| 0.172577
| 0.157372
| 0.209829
| 0.119271
| 0.837659
| 0.837659
| 0.80508
| 0.727775
| 0.727775
| 0.727775
| 0
| 0.117091
| 0.325388
| 3,608
| 82
| 80
| 44
| 0.626952
| 0
| 0
| 0.567568
| 0
| 0
| 0.298503
| 0
| 0
| 0
| 0
| 0
| 0.256757
| 1
| 0.054054
| false
| 0
| 0.027027
| 0
| 0.094595
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1bf8eb6573e22f59fe4426c1b2bbc96b1a9698cf
| 4,787
|
py
|
Python
|
tests/test_exporter/test_export_json_relationships_properties.py
|
di-unipi-socc/micro-tosca
|
5d5c9361b34eeabaed8955ddc62282607672bd81
|
[
"MIT"
] | null | null | null |
tests/test_exporter/test_export_json_relationships_properties.py
|
di-unipi-socc/micro-tosca
|
5d5c9361b34eeabaed8955ddc62282607672bd81
|
[
"MIT"
] | 3
|
2019-10-02T13:55:39.000Z
|
2021-06-01T22:55:20.000Z
|
tests/test_exporter/test_export_json_relationships_properties.py
|
di-unipi-socc/microFreshener-core
|
5d5c9361b34eeabaed8955ddc62282607672bd81
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from microfreshener.core.importer import JSONImporter
from microfreshener.core.exporter import JSONExporter
from microfreshener.core.model.type import MICROTOSCA_RELATIONSHIPS_INTERACT_WITH_TIMEOUT_PROPERTY, MICROTOSCA_RELATIONSHIPS_INTERACT_WITH_DYNAMIC_DISCOVEY_PROPERTY,MICROTOSCA_RELATIONSHIPS_INTERACT_WITH_CIRCUIT_BREAKER_PROPERTY
class TestJSONTranformer(TestCase):
    """Check that JSONExporter serialises the interact-with properties
    (timeout, circuit breaker, dynamic discovery) of each relationship
    of the fixture micro-TOSCA model."""

    @classmethod
    def setUpClass(self):
        # Import the fixture model once; targets are named after the
        # property combination set on the link (t/c/d suffixes).
        file = 'data/tests/test_relationships_properties.json'
        self.importer = JSONImporter()
        self.microtosca = self.importer.Import(file)
        self.tranformer = JSONExporter()

    def _assert_properties(self, rel_dict, timeout, circuit_breaker, dynamic_discovery):
        """Assert the three interact-with flags of an exported link."""
        self.assertEqual(rel_dict[MICROTOSCA_RELATIONSHIPS_INTERACT_WITH_TIMEOUT_PROPERTY], timeout)
        self.assertEqual(rel_dict[MICROTOSCA_RELATIONSHIPS_INTERACT_WITH_CIRCUIT_BREAKER_PROPERTY], circuit_breaker)
        self.assertEqual(rel_dict[MICROTOSCA_RELATIONSHIPS_INTERACT_WITH_DYNAMIC_DISCOVEY_PROPERTY], dynamic_discovery)

    def test_relationship(self):
        rel_dict = self._export_link_to_json_from_source_to_target("source", "target")
        self._assert_properties(rel_dict, False, False, False)

    def test_relationship_has_id(self):
        rel_dict = self._export_link_to_json_from_source_to_target("source", "target")
        self.assertIn("id", rel_dict.keys())

    def test_relationship_t(self):
        rel_dict = self._export_link_to_json_from_source_to_target("source", "target_t")
        self._assert_properties(rel_dict, True, False, False)

    def test_relationship_c(self):
        rel_dict = self._export_link_to_json_from_source_to_target("source", "target_c")
        self._assert_properties(rel_dict, False, True, False)

    def test_relationship_d(self):
        rel_dict = self._export_link_to_json_from_source_to_target("source", "target_d")
        self._assert_properties(rel_dict, False, False, True)

    def test_relationship_tc(self):
        rel_dict = self._export_link_to_json_from_source_to_target("source", "target_tc")
        self._assert_properties(rel_dict, True, True, False)

    def test_relationship_td(self):
        rel_dict = self._export_link_to_json_from_source_to_target("source", "target_td")
        self._assert_properties(rel_dict, True, False, True)

    def test_relationship_cd(self):
        rel_dict = self._export_link_to_json_from_source_to_target("source", "target_cd")
        self._assert_properties(rel_dict, False, True, True)

    def test_relationship_tcd(self):
        rel_dict = self._export_link_to_json_from_source_to_target("source", "target_tcd")
        self._assert_properties(rel_dict, True, True, True)

    def _export_link_to_json_from_source_to_target(self, source_name, target_name):
        """Export the single interaction from source_name to target_name
        and return its JSON dict; fails if the link is absent or not unique."""
        source = self.microtosca[source_name]
        target = self.microtosca[target_name]
        link_to_target = [
            link for link in source.interactions if link.target == target]
        self.assertEqual(len(link_to_target), 1)
        rel_dict = self.tranformer.export_link_to_json(link_to_target[0])
        return rel_dict
| 59.098765
| 228
| 0.799666
| 582
| 4,787
| 6.046392
| 0.113402
| 0.071611
| 0.237852
| 0.268542
| 0.808184
| 0.808184
| 0.793976
| 0.761296
| 0.751634
| 0.751634
| 0
| 0.000481
| 0.131398
| 4,787
| 80
| 229
| 59.8375
| 0.845839
| 0
| 0
| 0.419355
| 0
| 0
| 0.036371
| 0.009406
| 0
| 0
| 0
| 0
| 0.419355
| 1
| 0.177419
| false
| 0
| 0.096774
| 0
| 0.306452
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4002260b35d090080babe0ad83e2e47b82370a06
| 16,360
|
py
|
Python
|
train.py
|
Billmvp73/attention-sampling-pytorch
|
141144b6e504023aace0767a6757e71c4ab2846f
|
[
"MIT"
] | null | null | null |
train.py
|
Billmvp73/attention-sampling-pytorch
|
141144b6e504023aace0767a6757e71c4ab2846f
|
[
"MIT"
] | null | null | null |
train.py
|
Billmvp73/attention-sampling-pytorch
|
141144b6e504023aace0767a6757e71c4ab2846f
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn.functional as F
import numpy as np
from tqdm import tqdm
import pdb
from utils import calc_cls_measures, move_to, clip_grad_norm_
from ats.utils import visualize, showPatch, patchGrid, mapGrid
def train(model, optimizer, train_loader, criterion, entropy_loss_func, opts):
    """Train the attention-sampling model for a single epoch.

    Parameters
    ----------
    model : forward returns (logits, attention_map, patches, x_low)
    optimizer : torch optimizer over model.parameters()
    train_loader : DataLoader whose dataset exposes a CLASSES sequence
    criterion : classification loss
    entropy_loss_func : attention-entropy regulariser (subtracted from the loss)
    opts : options namespace; uses opts.device and opts.clipnorm

    Returns
    -------
    (mean epoch loss rounded to 4 decimals, classification metrics)
    """
    # FIX: np.float / np.int were removed in NumPy 1.24 — use builtin types.
    y_probs = np.zeros((0, len(train_loader.dataset.CLASSES)), float)
    y_trues = np.zeros((0), int)
    losses = []

    # Put model in training mode
    model.train()

    for i, (x_low, x_high, label) in enumerate(tqdm(train_loader)):
        x_low, x_high, label = move_to([x_low, x_high, label], opts.device)

        optimizer.zero_grad()
        y, attention_map, patches, x_low = model(x_low, x_high)

        # Entropy is subtracted: higher attention entropy is rewarded.
        entropy_loss = entropy_loss_func(attention_map)
        loss = criterion(y, label) - entropy_loss

        loss.backward()
        torch.nn.utils.clip_grad_norm_(model.parameters(), opts.clipnorm)
        optimizer.step()

        losses.append(loss.item())

        y_prob = F.softmax(y, dim=1)
        y_probs = np.concatenate([y_probs, y_prob.detach().cpu().numpy()])
        y_trues = np.concatenate([y_trues, label.cpu().numpy()])

    train_loss_epoch = np.round(np.mean(losses), 4)
    metrics = calc_cls_measures(y_probs, y_trues)
    return train_loss_epoch, metrics
def evaluate(model, test_loader, criterion, entropy_loss_func, opts):
    """Evaluate the attention-sampling model for a single epoch.

    Parameters
    ----------
    model : forward returns (logits, attention_map, patches, x_low)
    test_loader : DataLoader whose dataset exposes a CLASSES sequence
    criterion : classification loss
    entropy_loss_func : attention-entropy regulariser (subtracted from the loss)
    opts : options namespace; uses opts.device

    Returns
    -------
    (mean epoch loss rounded to 4 decimals, classification metrics)
    """
    # FIX: np.float / np.int were removed in NumPy 1.24 — use builtin types.
    y_probs = np.zeros((0, len(test_loader.dataset.CLASSES)), float)
    y_trues = np.zeros((0), int)
    losses = []

    # Put model in eval mode
    model.eval()

    # No parameters are updated during evaluation, so skip autograd
    # bookkeeping entirely (saves memory and time).
    with torch.no_grad():
        for i, (x_low, x_high, label) in enumerate(tqdm(test_loader)):
            x_low, x_high, label = move_to([x_low, x_high, label], opts.device)

            y, attention_map, patches, x_low = model(x_low, x_high)

            entropy_loss = entropy_loss_func(attention_map)
            loss = criterion(y, label) - entropy_loss
            losses.append(loss.item())

            y_prob = F.softmax(y, dim=1)
            y_probs = np.concatenate([y_probs, y_prob.detach().cpu().numpy()])
            y_trues = np.concatenate([y_trues, label.cpu().numpy()])

    test_loss_epoch = np.round(np.mean(losses), 4)
    metrics = calc_cls_measures(y_probs, y_trues)
    return test_loss_epoch, metrics
#----- Train/Evaluate MultiResolution ------
def trainMultiRes(model, optimizer, train_loader, criterion, entropy_loss_func, opts):
    """Train a multi-resolution attention-sampling model for a single epoch.

    The model's forward returns
    (logits, attention_maps, patches, x_lows, patch_features, sampled_scales);
    attention_maps may be a single map or a list with one map per scale.

    Returns
    -------
    (mean epoch loss rounded to 4 decimals, classification metrics)
    """
    # FIX: np.float / np.int were removed in NumPy 1.24 — use builtin types.
    y_probs = np.zeros((0, len(train_loader.dataset.CLASSES)), float)
    y_trues = np.zeros((0), int)
    losses = []
    # How often each scale was chosen by the sampler over the epoch.
    total_sampled_scales = np.zeros(len(opts.scales), dtype=int)

    # Put the main model and every per-scale attention model in train mode.
    model.train()
    for ats_model in model.attention_models:
        ats_model.train()

    for i, (x_lows, x_highs, label) in enumerate(tqdm(train_loader)):
        x_lows, x_highs, label = move_to([x_lows, x_highs, label], opts.device)

        optimizer.zero_grad()
        y, attention_maps, patches, x_lows, patch_features, sampled_scales = model(x_lows, x_highs)

        if type(attention_maps) is list:
            if sampled_scales is not None:
                # Weight each scale's entropy by how often the sampler chose
                # it; compute the frequencies once and also accumulate them
                # (the original recomputed the same bincount twice).
                freq_sampled_scales = np.bincount(sampled_scales.data.cpu().numpy().reshape(-1), minlength=len(opts.scales))
                entropy_loss = freq_sampled_scales[0] * entropy_loss_func(attention_maps[0])
                for j in range(1, len(opts.scales)):
                    entropy_loss += freq_sampled_scales[j] * entropy_loss_func(attention_maps[j])
                entropy_loss /= freq_sampled_scales.sum()
                # A scale that was never sampled gets zero entropy weight
                # (and therefore no entropy gradient) this step.
                total_sampled_scales += freq_sampled_scales
            else:
                # No sampling information: average the entropies uniformly.
                entropy_loss = entropy_loss_func(attention_maps[0])
                for j in range(1, len(opts.scales)):
                    entropy_loss += entropy_loss_func(attention_maps[j])
                entropy_loss /= len(opts.scales)
            loss = criterion(y, label) - entropy_loss
        else:
            entropy_loss = entropy_loss_func(attention_maps)
            loss = criterion(y, label) - entropy_loss

        loss.backward()
        # Clip the attention models' and the main model's gradients together.
        all_params = [ats_model.parameters() for ats_model in model.attention_models] + [model.parameters()]
        clip_grad_norm_(all_params, opts.clipnorm)
        optimizer.step()

        losses.append(loss.item())
        y_prob = F.softmax(y, dim=1)
        y_probs = np.concatenate([y_probs, y_prob.detach().cpu().numpy()])
        y_trues = np.concatenate([y_trues, label.cpu().numpy()])

    train_loss_epoch = np.round(np.mean(losses), 4)
    metrics = calc_cls_measures(y_probs, y_trues)
    print("Sampled scale frequencies: ", total_sampled_scales)
    return train_loss_epoch, metrics
def evaluateMultiRes(model, test_loader, criterion, entropy_loss_func, opts):
    """Evaluate a multi-resolution attention-sampling model for one epoch.

    Mirrors trainMultiRes without the backward/step, and optionally renders
    per-item patch grids when opts.visualize is set.

    Returns
    -------
    (mean epoch loss rounded to 4 decimals, classification metrics)
    """
    # FIX: np.float / np.int were removed in NumPy 1.24 — use builtin types.
    y_probs = np.zeros((0, len(test_loader.dataset.CLASSES)), float)
    y_trues = np.zeros((0), int)
    losses = []
    # How often each scale was chosen by the sampler over the epoch.
    total_sampled_scales = np.zeros(len(opts.scales), dtype=int)

    # Put the main model and every per-scale attention model in eval mode.
    model.eval()
    for ats_model in model.attention_models:
        ats_model.eval()
    # model.sampler._top_k = True

    for i, (x_lows, x_highs, label) in enumerate(tqdm(test_loader)):
        x_lows, x_highs, label = move_to([x_lows, x_highs, label], opts.device)
        y, attention_maps, patches, x_lows, patch_features, sampled_scales = model(x_lows, x_highs)

        if type(attention_maps) is list:
            if sampled_scales is not None:
                # Weight each scale's entropy by its sampling frequency;
                # compute the bincount once (originally computed twice).
                freq_sampled_scales = np.bincount(sampled_scales.data.cpu().numpy().reshape(-1), minlength=len(opts.scales))
                entropy_loss = freq_sampled_scales[0] * entropy_loss_func(attention_maps[0])
                for j in range(1, len(opts.scales)):
                    entropy_loss += freq_sampled_scales[j] * entropy_loss_func(attention_maps[j])
                entropy_loss /= freq_sampled_scales.sum()
                total_sampled_scales += freq_sampled_scales
            else:
                # No sampling information: average the entropies uniformly.
                entropy_loss = entropy_loss_func(attention_maps[0])
                for j in range(1, len(opts.scales)):
                    entropy_loss += entropy_loss_func(attention_maps[j])
                entropy_loss /= len(opts.scales)
            loss = criterion(y, label) - entropy_loss
        else:
            entropy_loss = entropy_loss_func(attention_maps)
            loss = criterion(y, label) - entropy_loss

        losses.append(loss.item())
        y_prob = F.softmax(y, dim=1)
        y_probs = np.concatenate([y_probs, y_prob.detach().cpu().numpy()])
        y_trues = np.concatenate([y_trues, label.cpu().numpy()])

        if opts.visualize:
            for b in range(patches.shape[0]):
                # NOTE(review): y_probs[b] indexes the epoch-wide array and
                # label[0] prints the same label for every b — these look
                # like they were meant to be per-item; confirm before
                # relying on this output.
                print("expectation prediction: ", y_probs[b])
                print("label prediction: ", label[0])
                batch_patches = patches[b]
                patch_feature = patch_features[b]
                # Classify each patch individually for display.
                y_patch = model.classifier(patch_feature)
                y_patch_prop = F.softmax(y_patch, dim=1)
                predicted = []
                for prob in y_patch_prop:
                    if prob[0] >= prob[1]:
                        predicted.append(0)
                    else:
                        predicted.append(1)
                if type(attention_maps) is list:
                    batch_maps = [attention_map[b].cpu().numpy() for attention_map in attention_maps]
                else:
                    batch_maps = [attention_maps[b].cpu().numpy()]
                batch_imgs = [x_lows[k][b] for k in range(len(model.scales))]
                if sampled_scales is not None:
                    print("sampled patch scales: ", sampled_scales[b])
                    patchGrid(batch_patches, batch_maps, batch_imgs, (1, 5), predicted, sampled_scales[b])
                else:
                    patchGrid(batch_patches, batch_maps, batch_imgs, (1, 5), predicted)
    # model.sampler._top_k = False

    test_loss_epoch = np.round(np.mean(losses), 4)
    metrics = calc_cls_measures(y_probs, y_trues)
    print("Sampled scale frequencies: ", total_sampled_scales)
    return test_loss_epoch, metrics
def trainMultiResBatches(model, optimizer, train_loader, criterion, entropy_loss_func, opts):
    """Train for a single epoch, feeding each batch once per scale.

    The batch is first processed at full resolution, then re-fed after
    rescaling the inputs by every additional factor in opts.scales.

    Returns
    -------
    train_loss_epoch : list of float
        Mean loss per scale, rounded to 4 decimals.
    metrics : list
        Classification metrics appended after every optimisation step.
        NOTE(review): each entry is computed on the cumulative
        y_probs/y_trues arrays, so later entries mix scales — confirm intent.
    """
    # FIX: np.float / np.int were removed in NumPy 1.24 — use builtin types.
    y_probs = np.zeros((0, len(train_loader.dataset.CLASSES)), float)
    y_trues = np.zeros((0), int)
    losses = [[] for s in opts.scales]  # one loss history per scale
    metrics = []

    # Put model in training mode
    model.train()

    for i, (x_low, x_high, label) in enumerate(tqdm(train_loader)):
        # Full-resolution pass.
        x_low, x_high, label = move_to([x_low, x_high, label], opts.device)

        optimizer.zero_grad()
        y, attention_map, patches, x_low_out = model(x_low, x_high)

        entropy_loss = entropy_loss_func(attention_map)
        loss = criterion(y, label) - entropy_loss
        loss.backward()
        torch.nn.utils.clip_grad_norm_(model.parameters(), opts.clipnorm)
        optimizer.step()

        losses[0].append(loss.item())
        y_prob = F.softmax(y, dim=1)
        y_probs = np.concatenate([y_probs, y_prob.detach().cpu().numpy()])
        y_trues = np.concatenate([y_trues, label.cpu().numpy()])
        metric = calc_cls_measures(y_probs, y_trues)
        metrics.append(metric)

        # Re-run the same batch at every additional scale (si renamed from i
        # so the batch index is no longer shadowed).
        for si in range(1, len(opts.scales)):
            s = opts.scales[si]
            x_low_i = F.interpolate(x_low, scale_factor = s, mode='bilinear')
            x_high_i = F.interpolate(x_high, scale_factor = s, mode='bilinear')
            x_low_i, x_high_i = move_to([x_low_i, x_high_i], opts.device)

            optimizer.zero_grad()
            y, attention_map, patches, x_low_i_out = model(x_low_i, x_high_i)

            entropy_loss = entropy_loss_func(attention_map)
            loss = criterion(y, label) - entropy_loss
            loss.backward()
            torch.nn.utils.clip_grad_norm_(model.parameters(), opts.clipnorm)
            optimizer.step()

            losses[si].append(loss.item())
            y_prob = F.softmax(y, dim=1)
            y_probs = np.concatenate([y_probs, y_prob.detach().cpu().numpy()])
            y_trues = np.concatenate([y_trues, label.cpu().numpy()])
            metric = calc_cls_measures(y_probs, y_trues)
            metrics.append(metric)

    train_loss_epoch = [np.round(np.mean(loss_s), 4) for loss_s in losses]
    return train_loss_epoch, metrics
def evaluateMultiResBatches(model, test_loader, criterion, entropy_loss_func, opts):
    """Evaluate for a single epoch, feeding each batch once per scale.

    Mirrors trainMultiResBatches without the backward/step; when
    opts.visualize is set, patches/maps/images are accumulated across all
    batches and rendered at the end.

    Returns
    -------
    test_loss_epoch : list of float
        Mean loss per scale, rounded to 4 decimals.
    metrics : list
        Metrics appended after every pass.
        NOTE(review): each entry is computed on the cumulative
        y_probs/y_trues arrays, so later entries mix scales — confirm intent.
    """
    # FIX: np.float / np.int were removed in NumPy 1.24 — use builtin types.
    y_probs = np.zeros((0, len(test_loader.dataset.CLASSES)), float)
    y_trues = np.zeros((0), int)
    losses = [[] for s in opts.scales]  # one loss history per scale
    metrics = []

    # Put model in eval mode
    model.eval()

    # Collected only when opts.visualize is set.
    all_patches = []
    all_maps = []
    all_x_low = []
    all_sampled_ats = []

    for i, (x_low, x_high, label) in enumerate(tqdm(test_loader)):
        # Full-resolution pass.
        x_low, x_high, label = move_to([x_low, x_high, label], opts.device)
        y, attention_map, patches, x_low_out, sampled_attention = model(x_low, x_high)
        if opts.visualize:
            all_patches.append(patches)
            all_maps.append(attention_map)
            all_x_low.append(x_low_out)
            all_sampled_ats.append(sampled_attention)

        entropy_loss = entropy_loss_func(attention_map)
        loss = criterion(y, label) - entropy_loss
        losses[0].append(loss.item())

        y_prob = F.softmax(y, dim=1)
        y_probs = np.concatenate([y_probs, y_prob.detach().cpu().numpy()])
        y_trues = np.concatenate([y_trues, label.cpu().numpy()])
        metric = calc_cls_measures(y_probs, y_trues)
        metrics.append(metric)

        # Re-run the same batch at every additional scale (si renamed from i
        # so the batch index is no longer shadowed).
        for si in range(1, len(opts.scales)):
            s = opts.scales[si]
            x_low_i = F.interpolate(x_low, scale_factor = s, mode='bilinear')
            x_high_i = F.interpolate(x_high, scale_factor = s, mode='bilinear')
            x_low_i, x_high_i = move_to([x_low_i, x_high_i], opts.device)
            y, attention_map, patches, x_low_i_out, sampled_attention = model(x_low_i, x_high_i)
            if opts.visualize:
                all_patches.append(patches)
                all_maps.append(attention_map)
                all_x_low.append(x_low_i_out)
                all_sampled_ats.append(sampled_attention)

            entropy_loss = entropy_loss_func(attention_map)
            loss = criterion(y, label) - entropy_loss
            losses[si].append(loss.item())

            y_prob = F.softmax(y, dim=1)
            y_probs = np.concatenate([y_probs, y_prob.detach().cpu().numpy()])
            y_trues = np.concatenate([y_trues, label.cpu().numpy()])
            metric = calc_cls_measures(y_probs, y_trues)
            metrics.append(metric)

    if opts.visualize:
        # NOTE(review): assumes every accumulated batch has the same size so
        # the patch tensors can be concatenated along dim=1 — confirm.
        all_patches_tensor = torch.cat(all_patches, dim=1)
        for b in range(patches.shape[0]):
            batch_patches = all_patches_tensor[b]
            batch_maps = [attention_map[b].cpu().numpy() for attention_map in all_maps]
            for ats in batch_maps:
                print(ats)
            batch_imgs = [x_low_j[b] for x_low_j in all_x_low]
            # (local typo fixed: sampled_attetion -> sampled_attention)
            batch_sampled_ats = [sampled_attention[b].cpu().numpy() for sampled_attention in all_sampled_ats]
            print(batch_sampled_ats)
            patchGrid(batch_patches, batch_maps, batch_imgs, (3, 5))

    test_loss_epoch = [np.round(np.mean(loss_s), 4) for loss_s in losses]
    return test_loss_epoch, metrics
def save_checkpoint(model, optimizer, save_path, epoch):
    """Serialise the per-scale attention models, the main model, the
    optimizer state and the epoch counter into one checkpoint file."""
    checkpoint = {
        'ats_model_state_dict%d' % idx: ats.state_dict()
        for idx, ats in enumerate(model.attention_models)
    }
    checkpoint['model_state_dict'] = model.state_dict()
    checkpoint['optimizer_state_dict'] = optimizer.state_dict()
    checkpoint['epoch'] = epoch
    torch.save(checkpoint, save_path)
def load_checkpoint(model, optimizer, load_path):
    """Restore the main model, each per-scale attention model and the
    optimizer from a checkpoint written by ``save_checkpoint``.

    Returns (model, optimizer, epoch) with the states loaded in place."""
    state = torch.load(load_path)
    model.load_state_dict(state['model_state_dict'])
    for idx, ats in enumerate(model.attention_models):
        ats.load_state_dict(state['ats_model_state_dict%d' % idx])
    optimizer.load_state_dict(state['optimizer_state_dict'])
    return model, optimizer, state['epoch']
| 40.295567
| 124
| 0.61901
| 2,209
| 16,360
| 4.307832
| 0.076053
| 0.060109
| 0.034678
| 0.015132
| 0.82356
| 0.793611
| 0.772278
| 0.751997
| 0.736444
| 0.722152
| 0
| 0.005419
| 0.266809
| 16,360
| 406
| 125
| 40.295567
| 0.787912
| 0.083863
| 0
| 0.739927
| 0
| 0
| 0.018504
| 0.00295
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029304
| false
| 0
| 0.025641
| 0
| 0.080586
| 0.025641
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40213ae63f8c0d4db2527e0466e6408067fb16bb
| 123
|
py
|
Python
|
naslib/search_spaces/nasbench101/tmp/__init__.py
|
deepdad/NASLib-1
|
6c93788f145187fe8cda446531f5b9f98e4ab48b
|
[
"Apache-2.0"
] | 14
|
2021-12-08T17:56:01.000Z
|
2022-01-15T05:06:59.000Z
|
naslib/search_spaces/nasbench101/tmp/__init__.py
|
deepdad/NASLib-1
|
6c93788f145187fe8cda446531f5b9f98e4ab48b
|
[
"Apache-2.0"
] | 4
|
2022-01-10T09:04:38.000Z
|
2022-01-23T03:35:09.000Z
|
naslib/search_spaces/nasbench101/tmp/__init__.py
|
deepdad/NASLib-1
|
6c93788f145187fe8cda446531f5b9f98e4ab48b
|
[
"Apache-2.0"
] | 1
|
2021-12-08T17:56:06.000Z
|
2021-12-08T17:56:06.000Z
|
from .search_space_1 import SearchSpace1
from .search_space_2 import SearchSpace2
from .search_space_3 import SearchSpace3
| 30.75
| 40
| 0.878049
| 18
| 123
| 5.666667
| 0.555556
| 0.294118
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 0.097561
| 123
| 3
| 41
| 41
| 0.864865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4022bad862283139afbf5ac361732179cb750b78
| 131,797
|
py
|
Python
|
pymoso/testers/tpctester.py
|
pymoso/PyMOSO
|
72f43dc6b4259b4772db7c7ffa27e197e5d3367a
|
[
"MIT"
] | 9
|
2019-03-26T21:41:13.000Z
|
2021-07-05T09:06:32.000Z
|
pymoso/testers/tpctester.py
|
pymoso/PyMOSO
|
72f43dc6b4259b4772db7c7ffa27e197e5d3367a
|
[
"MIT"
] | null | null | null |
pymoso/testers/tpctester.py
|
pymoso/PyMOSO
|
72f43dc6b4259b4772db7c7ffa27e197e5d3367a
|
[
"MIT"
] | 4
|
2019-08-06T07:52:13.000Z
|
2022-02-01T13:14:03.000Z
|
#!/usr/bin/env python
"""
Summary
-------
Provide the tester for Test Problem C
"""
from ..problems import probtpc
from math import sin, exp, sqrt
from ..chnutils import dh
# Scaling divisor applied to every coordinate before evaluation.
df = 2


def true_g(x):
    """
    Compute the expected values of a point.

    Parameters
    ----------
    x : tuple of int
        A feasible point

    Returns
    -------
    tuple of float
        The objective values
    """
    scaled = tuple(coord / df for coord in x)
    sines = [sin(coord) for coord in scaled]
    chisquare = 1.0
    # First objective: pairwise Ackley-style terms over (x0,x1) and (x1,x2).
    obj1 = 0.0
    for i in (0, 1):
        obj1 += -10 * chisquare * exp(-0.2 * sqrt(scaled[i]**2 + scaled[i + 1]**2))
    # Second objective: per-coordinate power/sine terms.
    obj2 = 0.0
    for i in (0, 1, 2):
        obj2 += chisquare * (pow(abs(scaled[i]), 0.8) + 5 * pow(sines[i], 3))
    return obj1, obj2
def get_ranx0(rng):
    """
    Uniformly sample from the feasible space.

    Parameters
    ----------
    rng : prng.MRG32k3a object

    Returns
    -------
    x0 : tuple of int
        The randomly chosen point
    """
    # Each coordinate is drawn independently from the scaled integer range.
    feasible_range = range(-5*df, 5*df + 1)
    return tuple(rng.choice(feasible_range) for _ in range(3))
class TPCTester(object):
    """
    Store useful data for working with Test Problem C.

    Attributes
    ----------
    ranorc : chnbase.Oracle class
    true_g : function
    soln : list of set of tuple of int
        The set of LES's which solve TPC locally
    get_ranx0 : function
    """

    def __init__(self):
        self.ranorc = probtpc.ProbTPC
        self.true_g = true_g
        self.soln = soln
        self.get_ranx0 = get_ranx0

    def metric(self, eles):
        """
        Compute a metric from a simulated solution to the true solution.

        Parameters
        ----------
        eles : set of tuple of numbers
            Simulated solution

        Returns
        -------
        float
            The performance metric
        """
        # Map the simulated points to their true objective values, then take
        # the smallest Hausdorff-style distance to any known local solution.
        efrontier = [self.true_g(point) for point in eles]
        return min(dh(efrontier, les) for les in self.soln)
soln = [{(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-11.280688021901707, -4.918860901673243), (-14.816364413634357, -3.5793569796239844), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0)}, {(-14.065696597405992, -1.3395039220492593), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-20.0, 0.0)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-13.473488803943512, -0.42620339169788846), (-8.131393194811983, -1.3395039220492593)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), 
(-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.164716709720746, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-11.280688021901707, -4.918860901673243), (-12.164716709720746, -3.318620105004037), (-20.0, 0.0)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, 
-3.5793569796239844)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-13.372517286713464, -0.8050787779834598), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-13.473488803943512, -0.42620339169788846)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-13.950693125342534, -7.158713959247969), (-15.160198872823193, -5.558473162578762)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-13.473488803943512, -0.42620339169788846), (-8.131393194811983, -1.3395039220492593)}, 
{(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-7.444135831774321, -1.7657073137471477), (-7.262810213332967, -2.496584292720245), (-20.0, 0.0)}, {(-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-7.93820241249052, -4.918860901673243), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-13.473488803943512, -0.42620339169788846), (-8.131393194811983, -1.3395039220492593)}, {(-13.473488803943512, -0.42620339169788846), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-13.372517286713464, -0.8050787779834598)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-6.866182354632733, -2.6790078440985186), (-20.0, 0.0)}, 
{(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 
0.0)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-13.950693125342534, -7.158713959247969), (-15.160198872823193, -5.558473162578762)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.473878804223169, -4.918860901673243), 
(-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-11.280688021901707, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-13.473488803943512, -0.42620339169788846), (-12.896298567757443, -1.1570803706709856), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), 
(-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-7.93820241249052, -4.918860901673243), (-20.0, 0.0)}, {(-12.989862749141844, -0.42620339169788846), (-12.521071683594995, -1.1570803706709856), (-8.131393194811983, -1.3395039220492593), (-20.0, 0.0)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, 
-4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-7.444135831774321, -1.7657073137471477), (-7.262810213332967, -2.496584292720245), (-20.0, 0.0)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-7.745011630169058, -6.258364823722503), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, 
-4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-14.816364413634357, -3.5793569796239844), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0)}, {(-10.415016733609885, -8.498217881297228), (-16.374615061559638, -1.9791161829547779), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-10.950300520984303, -6.897977084628021), (-14.816364413634357, -3.5793569796239844)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-13.473488803943512, -0.42620339169788846)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, 
-4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-11.473878804223169, -4.918860901673243), (-14.816364413634357, -3.5793569796239844), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), 
(-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, -5.558473162578762)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, 
-3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, 
-3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-16.374615061559638, -1.9791161829547779), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-13.473488803943512, -0.42620339169788846), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-13.372517286713464, -0.8050787779834598)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, 
-3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, 
{(-14.065696597405992, -1.3395039220492593), (-7.444135831774321, -1.7657073137471477), (-7.262810213332967, -2.496584292720245), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-13.473488803943512, -0.42620339169788846), (-12.896298567757443, -1.1570803706709856), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), 
(-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-13.950693125342534, -7.158713959247969), (-15.160198872823193, -5.558473162578762)}, {(-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, -5.558473162578762)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-13.473488803943512, -0.42620339169788846), 
(-12.896298567757443, -1.1570803706709856), (-8.131393194811983, -1.3395039220492593), (-20.0, 0.0)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.164716709720746, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-14.065696597405992, -1.3395039220492593), (-6.866182354632733, -2.6790078440985186), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), 
(-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-14.065696597405992, -1.3395039220492593), (-8.131393194811983, -2.6790078440985186), (-5.600971514453484, -4.018511766147778), (-20.0, 0.0)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-14.816364413634357, -7.158713959247969), (-20.0, 0.0), (-16.374615061559638, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-15.595489737596997, -5.558473162578762)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, 
-5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.816364413634357, -3.5793569796239844), (-5.600971514453484, -4.018511766147778), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), 
(-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-13.473488803943512, -0.42620339169788846), (-8.131393194811983, -1.3395039220492593)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-11.280688021901707, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-14.065696597405992, -1.3395039220492593), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, 
-3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-11.280688021901707, -4.918860901673243), (-14.816364413634357, -3.5793569796239844), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-14.816364413634357, -7.158713959247969), (-20.0, 0.0), (-16.374615061559638, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-15.595489737596997, -5.558473162578762)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-14.065696597405992, -1.3395039220492593), (-11.280688021901707, -4.918860901673243), (-12.164716709720746, -3.318620105004037), (-20.0, 0.0)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-14.816364413634357, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, 
-6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-7.93820241249052, -4.918860901673243), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-13.473488803943512, -0.42620339169788846)}, {(-13.473488803943512, -0.42620339169788846), (-12.896298567757443, -1.1570803706709856), (-8.131393194811983, -1.3395039220492593), (-20.0, 0.0)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, -5.558473162578762)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, 
-4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-14.816364413634357, -7.158713959247969), (-20.0, 0.0), (-16.374615061559638, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-15.595489737596997, -5.558473162578762)}, {(-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-13.950693125342534, -7.158713959247969), (-15.160198872823193, -5.558473162578762)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.164716709720746, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), 
(-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-13.372517286713464, -0.8050787779834598), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-13.473488803943512, -0.42620339169788846)}, {(-12.989862749141844, -0.42620339169788846), (-12.521071683594995, -1.1570803706709856), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, -5.558473162578762)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, 
-3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, 
-3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-13.473488803943512, -0.42620339169788846), (-12.896298567757443, -1.1570803706709856), (-8.131393194811983, -1.3395039220492593), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-13.372517286713464, -0.8050787779834598), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-13.473488803943512, -0.42620339169788846)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, 
{(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, -5.558473162578762)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, 
-4.005560371321873)}, {(-11.280688021901707, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-16.374615061559638, -1.9791161829547779), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, -5.558473162578762)}, {(-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593)}, {(-11.280688021901707, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, 
-1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.164716709720746, -3.318620105004037), (-7.745011630169058, -6.258364823722503), (-14.065696597405992, -1.3395039220492593)}, {(-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-8.131393194811983, -2.6790078440985186), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, 
-4.918860901673243)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-13.950693125342534, -7.158713959247969), (-15.160198872823193, -5.558473162578762)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593)}, {(-10.415016733609885, -8.498217881297228), (-16.374615061559638, -1.9791161829547779), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), 
(-10.950300520984303, -6.897977084628021), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-13.950693125342534, -7.158713959247969), (-15.160198872823193, -5.558473162578762)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-13.473488803943512, -0.42620339169788846), (-12.896298567757443, -1.1570803706709856), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, 
-4.005560371321873)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-7.745011630169058, -6.258364823722503), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-11.473878804223169, -4.918860901673243), (-14.816364413634357, -3.5793569796239844), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, 
-4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-13.473488803943512, -0.42620339169788846)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-13.473488803943512, -0.42620339169788846), (-8.131393194811983, -1.3395039220492593)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-14.065696597405992, -1.3395039220492593), (-6.866182354632733, -2.6790078440985186), (-20.0, 0.0)}, {(-5.600971514453484, -4.018511766147778), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, 
{(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-7.444135831774321, -1.7657073137471477), (-7.262810213332967, -2.496584292720245), 
(-20.0, 0.0)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593), (-11.473878804223169, -4.918860901673243), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-8.131393194811983, -2.6790078440985186), (-5.600971514453484, -4.018511766147778), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, 
{(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-14.065696597405992, -1.3395039220492593), (-11.280688021901707, -4.918860901673243), (-12.164716709720746, -3.318620105004037), (-20.0, 0.0)}, {(-8.131393194811983, -1.3395039220492593), (-20.0, 0.0)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-12.989862749141844, -0.42620339169788846), (-12.521071683594995, -1.1570803706709856), (-20.0, 0.0)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), 
(-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-14.065696597405992, -1.3395039220492593), (-11.280688021901707, -4.918860901673243), (-12.164716709720746, -3.318620105004037), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-12.849239255767326, -1.535955756956557), 
(-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-10.950300520984303, -6.897977084628021), (-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-14.816364413634357, -7.158713959247969), (-20.0, 0.0), (-7.745011630169058, -8.498217881297228), (-16.374615061559638, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-15.595489737596997, -5.558473162578762)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, 
-6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-13.950693125342534, -7.158713959247969), (-15.160198872823193, -5.558473162578762)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-14.816364413634357, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, -5.558473162578762)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, 
{(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, 
-1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-11.280688021901707, -4.918860901673243), (-12.164716709720746, -3.318620105004037), (-20.0, 0.0)}, {(-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593)}, {(-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-10.950300520984303, -6.897977084628021), (-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, 
-5.558473162578762)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-14.065696597405992, -1.3395039220492593), (-8.131393194811983, -2.6790078440985186), (-5.600971514453484, -4.018511766147778), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-11.473878804223169, -4.918860901673243), (-14.816364413634357, -3.5793569796239844), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, 
-4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-7.93820241249052, -4.918860901673243), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), 
(-12.124191460850021, -4.005560371321873)}, {(-5.600971514453484, -4.018511766147778), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, -5.558473162578762)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-13.950693125342534, -7.158713959247969), (-15.160198872823193, -5.558473162578762)}, {(-11.280688021901707, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-11.280688021901707, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), 
(-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-11.280688021901707, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-14.065696597405992, -1.3395039220492593), (-7.444135831774321, -1.7657073137471477), (-7.262810213332967, -2.496584292720245), (-20.0, 0.0)}, {(-14.065696597405992, -1.3395039220492593), (-6.866182354632733, -2.6790078440985186), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-15.595489737596997, -5.558473162578762), (-14.509274506481022, -7.53758934553354), (-14.816364413634357, -7.158713959247969), (-20.0, 0.0), (-16.374615061559638, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-13.08502183705071, -10.738070938871953), (-13.945782684086748, -9.137830142202747), (-15.072766328875296, -5.937348548864334)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, 
-4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-14.816364413634357, -7.158713959247969), (-20.0, 0.0), (-16.374615061559638, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-15.595489737596997, -5.558473162578762)}, {(-10.415016733609885, 
-8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-5.600971514453484, -4.018511766147778), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-12.989862749141844, -0.42620339169788846), (-12.521071683594995, -1.1570803706709856), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-11.280688021901707, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, 
-6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-12.989862749141844, -0.42620339169788846), (-12.521071683594995, -1.1570803706709856), (-8.131393194811983, -1.3395039220492593), (-20.0, 0.0)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-14.816364413634357, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-7.444135831774321, -1.7657073137471477), (-7.262810213332967, -2.496584292720245), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), 
(-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, 
-1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-14.065696597405992, -1.3395039220492593), (-20.0, 0.0)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-17.40818220681718, -3.5793569796239844), (-13.950693125342534, -7.158713959247969), (-15.160198872823193, -5.558473162578762)}, {(-18.18730753077982, -1.9791161829547779), 
(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-14.065696597405992, -1.3395039220492593), (-7.444135831774321, -1.7657073137471477), (-7.262810213332967, -2.496584292720245), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), 
(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-14.816364413634357, -3.5793569796239844)}, {(-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-7.93820241249052, -4.918860901673243), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), 
(-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), 
(-10.950300520984303, -6.897977084628021), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844), (-7.93820241249052, -4.918860901673243)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), 
(-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-14.065696597405992, -1.3395039220492593), (-7.444135831774321, -1.7657073137471477), (-7.262810213332967, -2.496584292720245), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), 
(-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, -5.558473162578762)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-7.745011630169058, -6.258364823722503), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, 
-2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.124191460850021, -4.005560371321873), (-17.40818220681718, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-7.745011630169058, -6.258364823722503), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-11.280688021901707, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-11.280688021901707, -4.918860901673243), (-14.816364413634357, -3.5793569796239844), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), 
(-20.0, 0.0), (-10.950300520984303, -6.897977084628021), (-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593)}, {(-14.065696597405992, -1.3395039220492593), (-7.444135831774321, -1.7657073137471477), (-7.262810213332967, -2.496584292720245), (-20.0, 0.0)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-15.723690695217467, -3.9582323659095557), (-13.950693125342534, -7.158713959247969), (-17.40818220681718, -3.5793569796239844), (-15.160198872823193, -5.558473162578762)}, {(-12.989862749141844, -0.42620339169788846), (-12.521071683594995, -1.1570803706709856), (-8.131393194811983, -1.3395039220492593), (-20.0, 0.0)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, 
-1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.164716709720746, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-16.374615061559638, -1.9791161829547779), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-7.262810213332967, -2.496584292720245), 
(-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-13.473488803943512, -0.42620339169788846)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-7.745011630169058, -6.258364823722503), (-14.816364413634357, -3.5793569796239844)}, {(-10.415016733609885, -8.498217881297228), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, 
{(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-6.866182354632733, -2.6790078440985186), (-20.0, 0.0)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-6.866182354632733, -2.6790078440985186), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-11.513792343378576, -5.297736287958815), (-20.0, 0.0), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-12.849239255767326, -1.535955756956557), 
(-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.164716709720746, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-14.065696597405992, -1.3395039220492593), (-11.473878804223169, -4.918860901673243), (-12.253004128185811, -3.318620105004037), (-20.0, 0.0)}, {(-14.065696597405992, -1.3395039220492593), (-20.0, 0.0)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, 
-4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-14.816364413634357, -3.5793569796239844)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-12.849239255767326, -1.535955756956557), (-10.950300520984303, -6.897977084628021), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.253004128185811, -3.318620105004037), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-7.262810213332967, -2.496584292720245), (-7.444135831774321, -1.7657073137471477), (-20.0, 0.0), (-12.896298567757443, -1.1570803706709856), (-13.473488803943512, -0.42620339169788846)}, {(-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.280688021901707, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-20.0, 0.0), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-6.672991572311271, -6.258364823722503), (-20.0, 0.0), 
(-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-12.849239255767326, -1.535955756956557), (-11.655400395303172, -4.73643735029497), (-12.80902546431919, -2.4053195746526663), (-12.28574743337305, -3.1361965536257634), (-12.253004128185811, -3.318620105004037), (-12.124191460850021, -4.005560371321873), (-14.065696597405992, -1.3395039220492593)}, {(-11.473878804223169, -4.918860901673243), (-17.40818220681718, -3.5793569796239844), (-18.18730753077982, -1.9791161829547779), (-20.0, 0.0)}, {(-16.374615061559638, -1.9791161829547779), (-6.672991572311271, -6.258364823722503), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-7.745011630169058, -6.258364823722503), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-10.415016733609885, -8.498217881297228), (-16.374615061559638, -1.9791161829547779), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-14.816364413634357, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-10.415016733609885, -8.498217881297228), (-20.0, 0.0), (-11.513792343378576, -5.297736287958815), (-11.655400395303172, -4.73643735029497), (-10.950300520984303, -6.897977084628021), (-17.40818220681718, -3.5793569796239844), 
(-12.124191460850021, -4.005560371321873)}, {(-18.18730753077982, -1.9791161829547779), (-20.0, 0.0), (-6.672991572311271, -6.258364823722503), (-11.473878804223169, -4.918860901673243), (-11.655400395303172, -4.73643735029497), (-17.40818220681718, -3.5793569796239844), (-12.124191460850021, -4.005560371321873)}]
| 1,279.582524
| 129,674
| 0.734918
| 13,663
| 131,797
| 7.088268
| 0.013979
| 0.021312
| 0.021312
| 0.02664
| 0.985782
| 0.985606
| 0.985606
| 0.985586
| 0.984749
| 0.984037
| 0
| 0.769757
| 0.056132
| 131,797
| 102
| 129,675
| 1,292.127451
| 0.008762
| 0.0061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108108
| false
| 0
| 0.081081
| 0
| 0.297297
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
4027d61c83a02a1111644cf33ac186acd6cd7f90
| 10,623
|
py
|
Python
|
tests/test_properties.py
|
M-o-a-T/asyncamqp
|
dd805fc93e668b4ae094609d4afefa4561e64ae4
|
[
"BSD-3-Clause"
] | 10
|
2018-03-17T01:58:51.000Z
|
2021-08-23T17:16:02.000Z
|
tests/test_properties.py
|
smurfix/trio-amqp
|
dd805fc93e668b4ae094609d4afefa4561e64ae4
|
[
"BSD-3-Clause"
] | 13
|
2018-02-27T16:02:08.000Z
|
2021-09-10T20:12:17.000Z
|
tests/test_properties.py
|
M-o-a-T/asyncamqp
|
dd805fc93e668b4ae094609d4afefa4561e64ae4
|
[
"BSD-3-Clause"
] | 5
|
2018-03-10T19:16:19.000Z
|
2020-12-18T16:19:13.000Z
|
"""
Tests for message properties for basic deliver
"""
import anyio
import pytest
import logging
from . import testcase
logger = logging.getLogger(__name__)
# Broker-object names shared by the server and client halves of the RPC tests.
server_queue_name = 'server_queue'  # queue the server consumes requests from
client_queue_name = 'client_reply_queue'  # exclusive queue the client receives replies on
exchange_name = 'exchange_name'  # direct exchange both sides bind their queues to
server_routing_key = 'reply_test'  # routing key clients publish requests under
class TestReplyOld(testcase.RabbitTestCase):
"""RPC test using classic callbacks"""
async def _server(
    self,
    amqp,
    server_future,
    exchange_name,
    routing_key,
    done,
):
    """Serve a single RPC request.

    Declares the server queue, binds it to *exchange_name* under
    *routing_key*, then consumes incoming messages.  Each message is
    answered by publishing a reply routed via the request's ``reply_to``
    property, echoing back its ``correlation_id``.  The received
    ``(body, envelope, properties)`` triple is stashed on
    *server_future* for the test to inspect.
    """
    async with amqp.new_channel() as channel:
        # Broker-side setup: request queue, exchange, and the binding between them.
        await channel.queue_declare(server_queue_name, exclusive=False, no_wait=False)
        await channel.exchange_declare(exchange_name, type_name='direct')
        await channel.queue_bind(server_queue_name, exchange_name, routing_key=routing_key)

        async def on_request(channel, body, envelope, properties):
            logger.debug('Server received message')
            # Route the answer wherever the client asked, carrying the
            # correlation id so the client can match reply to request.
            reply_props = {'correlation_id': properties.correlation_id}
            logger.debug('Replying to %r', properties.reply_to)
            await channel.publish(
                b'reply message', exchange_name, properties.reply_to, reply_props
            )
            server_future.test_result = (body, envelope, properties)
            await server_future.set()
            logger.debug('Server replied')

        await channel.basic_consume(on_request, queue_name=server_queue_name)
        logger.debug('Server consuming messages')
        await done.set()  # signal the test driver that the server is ready
        await server_future.wait()  # keep the channel open until one request was served
async def _client(
self,
amqp,
client_future,
exchange_name,
server_routing_key,
correlation_id,
client_routing_key,
done,
):
"""Declare a queue, bind client_routing_key to it, and publish a
message to the server with the reply_to property set to that
routing key
"""
async with amqp.new_channel() as client_channel:
await client_channel.queue_declare(client_queue_name, exclusive=True, no_wait=False)
await client_channel.queue_bind(
client_queue_name, exchange_name, routing_key=client_routing_key
)
async def client_callback(channel, body, envelope, properties):
logger.debug('Client received message')
client_future.test_result = (body, envelope, properties)
await client_future.set()
await client_channel.basic_consume(client_callback, queue_name=client_queue_name)
logger.debug('Client consuming messages')
await done.set()
await client_channel.publish(
b'client message', exchange_name, server_routing_key, {
'correlation_id': correlation_id,
'reply_to': client_routing_key
}
)
logger.debug('Client published message')
await client_future.wait()
@pytest.mark.trio
async def test_reply_to(self, amqp):
server_future = anyio.create_event()
async with anyio.create_task_group() as n:
done_here = anyio.create_event()
await n.spawn(self._server, amqp, server_future, exchange_name, server_routing_key, done_here)
await done_here.wait()
correlation_id = 'secret correlation id'
client_routing_key = 'secret_client_key'
client_future = anyio.create_event()
done_here = anyio.create_event()
await n.spawn(
self._client, amqp, client_future, exchange_name, server_routing_key,
correlation_id, client_routing_key, done_here
)
await done_here.wait()
logger.debug('Waiting for server to receive message')
await server_future.wait()
server_body, server_envelope, server_properties = \
server_future.test_result
assert server_body == b'client message'
assert server_properties.correlation_id == correlation_id
assert server_properties.reply_to == client_routing_key
assert server_envelope.routing_key == server_routing_key
logger.debug('Waiting for client to receive message')
await client_future.wait()
client_body, client_envelope, client_properties = \
client_future.test_result
assert client_body == b'reply message'
assert client_properties.correlation_id == correlation_id
assert client_envelope.routing_key == client_routing_key
await n.cancel_scope.cancel()
class TestReplyNew(testcase.RabbitTestCase):
    """RPC test using iteration.

    Same request/reply round-trip as TestReplyOld, but each side's
    consumer is an async iterator (``channel.new_consumer``) running in
    its own task, cancelled via a cancel scope stored on ``self`` once
    the exchange is complete.
    """

    async def _server(
            self,
            amqp,
            server_future,
            exchange_name,
            routing_key,
            done,
    ):
        """Consume messages and reply to them by publishing messages back
        to the client using routing key set to the reply_to property
        """
        async with amqp.new_channel() as channel:
            await channel.queue_declare(server_queue_name, exclusive=False, no_wait=False)
            await channel.exchange_declare(exchange_name, type_name='direct')
            await channel.queue_bind(server_queue_name, exchange_name, routing_key=routing_key)
            async with anyio.create_task_group() as n:
                # Run the iterating consumer in its own task and wait until
                # it is actually consuming before reporting readiness.
                done_here = anyio.create_event()
                await n.spawn(self._server_consumer, channel, server_future, done_here)
                await done_here.wait()
                await done.set()
                await server_future.wait()
                # The consumer iterates forever; cancel the scope it stored
                # on self so the task group can exit.
                await self._server_scope.cancel()

    async def _server_consumer(self, channel, server_future, done):
        async with anyio.open_cancel_scope() as scope:
            # Publish the scope so _server can cancel this consumer later.
            self._server_scope = scope
            async with channel.new_consumer(queue_name=server_queue_name) \
                    as data:
                logger.debug('Server consuming messages')
                await done.set()
                async for body, envelope, properties in data:
                    # Reply on the requested routing key, propagating the
                    # correlation id so the client can match the reply.
                    logger.debug('Server received message')
                    publish_properties = {'correlation_id': properties.correlation_id}
                    logger.debug('Replying to %r', properties.reply_to)
                    await channel.publish(
                        b'reply message', exchange_name, properties.reply_to, publish_properties
                    )
                    server_future.test_result = (body, envelope, properties)
                    await server_future.set()
                    logger.debug('Server replied')

    async def _client(
            self,
            amqp,
            client_future,
            exchange_name,
            server_routing_key,
            correlation_id,
            client_routing_key,
            done,
    ):
        """Declare a queue, bind client_routing_key to it, and publish a
        message to the server with the reply_to property set to that
        routing key
        """
        async with amqp.new_channel() as client_channel:
            await client_channel.queue_declare(client_queue_name, exclusive=True, no_wait=False)
            await client_channel.queue_bind(
                client_queue_name, exchange_name, routing_key=client_routing_key
            )
            async with anyio.create_task_group() as n:
                # Start the reply consumer first so the server's reply
                # cannot be published before we can see it.
                done_here = anyio.create_event()
                await n.spawn(self._client_consumer, client_channel, client_future, done_here)
                await done_here.wait()
                await done.set()
                await client_channel.publish(
                    b'client message', exchange_name, server_routing_key, {
                        'correlation_id': correlation_id,
                        'reply_to': client_routing_key
                    }
                )
                logger.debug('Client published message')
                await client_future.wait()
                # Reply received; cancel the consumer task so the task
                # group can exit.
                await self._client_scope.cancel()

    async def _client_consumer(self, channel, client_future, done):
        async with anyio.open_cancel_scope() as scope:
            # Publish the scope so _client can cancel this consumer later.
            self._client_scope = scope
            async with channel.new_consumer(queue_name=client_queue_name) \
                    as data:
                await done.set()
                logger.debug('Client consuming messages')
                async for body, envelope, properties in data:
                    # Record the reply and wake up the test body.
                    logger.debug('Client received message')
                    client_future.test_result = (body, envelope, properties)
                    await client_future.set()

    @pytest.mark.trio
    async def test_reply_to(self, amqp):
        server_future = anyio.create_event()
        async with anyio.create_task_group() as n:
            # Start the server and wait until it is consuming before the
            # client publishes anything.
            done_here = anyio.create_event()
            await n.spawn(self._server, amqp, server_future, exchange_name, server_routing_key, done_here)
            await done_here.wait()
            correlation_id = 'secret correlation id'
            client_routing_key = 'secret_client_key'
            client_future = anyio.create_event()
            done_here = anyio.create_event()
            await n.spawn(
                self._client, amqp, client_future, exchange_name, server_routing_key,
                correlation_id, client_routing_key, done_here
            )
            await done_here.wait()
            logger.debug('Waiting for server to receive message')
            await server_future.wait()
            server_body, server_envelope, server_properties = \
                server_future.test_result
            assert server_body == b'client message'
            assert server_properties.correlation_id == correlation_id
            assert server_properties.reply_to == client_routing_key
            assert server_envelope.routing_key == server_routing_key
            logger.debug('Waiting for client to receive message')
            await client_future.wait()
            client_body, client_envelope, client_properties = \
                client_future.test_result
            assert client_body == b'reply message'
            assert client_properties.correlation_id == correlation_id
            assert client_envelope.routing_key == client_routing_key
            # Server and client tasks block on their channels; cancel the
            # whole group so the test can finish.
            await n.cancel_scope.cancel()
| 40.701149
| 106
| 0.618752
| 1,169
| 10,623
| 5.326775
| 0.085543
| 0.069054
| 0.041111
| 0.036133
| 0.908303
| 0.87217
| 0.86944
| 0.851614
| 0.83684
| 0.810503
| 0
| 0
| 0.312247
| 10,623
| 260
| 107
| 40.857692
| 0.852313
| 0.00979
| 0
| 0.830918
| 0
| 0
| 0.077468
| 0
| 0
| 0
| 0
| 0
| 0.067633
| 1
| 0
| false
| 0
| 0.019324
| 0
| 0.028986
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
403bca96383ca9099b47194ceb4e69704b4e2554
| 5,080
|
py
|
Python
|
contact/migrations/0002_auto_20180322_1443.py
|
uktrade/invest
|
15b84c511839b46e81608fca9762d2df3f6df16c
|
[
"MIT"
] | 1
|
2019-01-18T03:50:46.000Z
|
2019-01-18T03:50:46.000Z
|
contact/migrations/0002_auto_20180322_1443.py
|
uktrade/invest
|
15b84c511839b46e81608fca9762d2df3f6df16c
|
[
"MIT"
] | 50
|
2018-01-24T18:04:08.000Z
|
2019-01-03T03:30:30.000Z
|
contact/migrations/0002_auto_20180322_1443.py
|
uktrade/invest
|
15b84c511839b46e81608fca9762d2df3f6df16c
|
[
"MIT"
] | 2
|
2018-02-12T15:20:52.000Z
|
2019-01-18T03:51:52.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-03-22 14:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add per-language ``heading_<lang>`` columns to the contact pages.

    Adds one nullable ``CharField`` per supported language (ar, de, en,
    es, fr, ja, pt, zh) to each of the three form-page models, with a
    model-specific default heading.  The original generated file repeated
    the same ``AddField`` block 24 times; the comprehension below emits
    exactly the same operations in exactly the same order (model-major,
    language-minor), so the migration state and SQL are unchanged.
    """

    dependencies = [
        ('contact', '0001_initial'),
    ]

    # NOTE: both iterables are literals on purpose — a comprehension in a
    # class body cannot reference other class attributes in its inner
    # loops (they are not visible from the comprehension's scope).
    operations = [
        migrations.AddField(
            model_name=model_name,
            name='heading_' + lang,
            field=models.CharField(default=default, max_length=255, null=True),
        )
        for model_name, default in (
            ('contactformpage', 'Contact Us'),
            ('feedbackformpage', 'Feedback'),
            ('reportissueformpage', 'Report Issue'),
        )
        for lang in ('ar', 'de', 'en', 'es', 'fr', 'ja', 'pt', 'zh')
    ]
| 37.352941
| 86
| 0.585433
| 483
| 5,080
| 5.995859
| 0.118012
| 0.149171
| 0.190608
| 0.223757
| 0.936464
| 0.936464
| 0.936464
| 0.901588
| 0.901588
| 0.901588
| 0
| 0.02622
| 0.294291
| 5,080
| 135
| 87
| 37.62963
| 0.78159
| 0.013583
| 0
| 0.9375
| 1
| 0
| 0.179513
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015625
| 0
| 0.039063
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
4045fff7bdff45d1169086e675fc8e7873f2671a
| 2,487
|
py
|
Python
|
tests/parser/constraint_with_60_variables.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/constraint_with_60_variables.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/constraint_with_60_variables.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# Parser regression test for a constraint with 60 distinct variables.
# Each variable occurs twice (once in bar/1 and once in xbar/1) so that,
# per the in-program comment, projection rewriting is not triggered.
# The test harness reads the module-level names `input` and `output`
# (the builtin shadowing is part of that contract); the expected output
# is the program echoed back unchanged.
input = """
bar(a) | xbar(a).
% let variables occur twice each to avoid projection rewriting
:- bar(X1),bar(X2),bar(X3),bar(X4),bar(X5),bar(X6),bar(X7),bar(X8),bar(X9),bar(X10),bar(X11),bar(X12),bar(X13),bar(X14),bar(X15),bar(X16),bar(X17),bar(X18),bar(X19),bar(X20),bar(X21),bar(X22),bar(X23),bar(X24),bar(X25),bar(X26),bar(X27),bar(X28),bar(X29),bar(X30),bar(X31),bar(X32),bar(X33),bar(X34),bar(X35),bar(X36),bar(X37),bar(X38),bar(X39),bar(X40),bar(X41),bar(X42),bar(X43),bar(X44),bar(X45),bar(X46),bar(X47),bar(X48),bar(X49),bar(X50),bar(X51),bar(X52),bar(X53),bar(X54),bar(X55),bar(X56),bar(X57),bar(X58),bar(X59),bar(X60),
xbar(X1),xbar(X2),xbar(X3),xbar(X4),xbar(X5),xbar(X6),xbar(X7),xbar(X8),xbar(X9),xbar(X10),xbar(X11),xbar(X12),xbar(X13),xbar(X14),xbar(X15),xbar(X16),xbar(X17),xbar(X18),xbar(X19),xbar(X20),xbar(X21),xbar(X22),xbar(X23),xbar(X24),xbar(X25),xbar(X26),xbar(X27),xbar(X28),xbar(X29),xbar(X30),xbar(X31),xbar(X32),xbar(X33),xbar(X34),xbar(X35),xbar(X36),xbar(X37),xbar(X38),xbar(X39),xbar(X40),xbar(X41),xbar(X42),xbar(X43),xbar(X44),xbar(X45),xbar(X46),xbar(X47),xbar(X48),xbar(X49),xbar(X50),xbar(X51),xbar(X52),xbar(X53),xbar(X54),xbar(X55),xbar(X56),xbar(X57),xbar(X58),xbar(X59),xbar(X60).
:- xbar(a).
"""
# Expected parser output: identical to the input program above.
output = """
bar(a) | xbar(a).
% let variables occur twice each to avoid projection rewriting
:- bar(X1),bar(X2),bar(X3),bar(X4),bar(X5),bar(X6),bar(X7),bar(X8),bar(X9),bar(X10),bar(X11),bar(X12),bar(X13),bar(X14),bar(X15),bar(X16),bar(X17),bar(X18),bar(X19),bar(X20),bar(X21),bar(X22),bar(X23),bar(X24),bar(X25),bar(X26),bar(X27),bar(X28),bar(X29),bar(X30),bar(X31),bar(X32),bar(X33),bar(X34),bar(X35),bar(X36),bar(X37),bar(X38),bar(X39),bar(X40),bar(X41),bar(X42),bar(X43),bar(X44),bar(X45),bar(X46),bar(X47),bar(X48),bar(X49),bar(X50),bar(X51),bar(X52),bar(X53),bar(X54),bar(X55),bar(X56),bar(X57),bar(X58),bar(X59),bar(X60),
xbar(X1),xbar(X2),xbar(X3),xbar(X4),xbar(X5),xbar(X6),xbar(X7),xbar(X8),xbar(X9),xbar(X10),xbar(X11),xbar(X12),xbar(X13),xbar(X14),xbar(X15),xbar(X16),xbar(X17),xbar(X18),xbar(X19),xbar(X20),xbar(X21),xbar(X22),xbar(X23),xbar(X24),xbar(X25),xbar(X26),xbar(X27),xbar(X28),xbar(X29),xbar(X30),xbar(X31),xbar(X32),xbar(X33),xbar(X34),xbar(X35),xbar(X36),xbar(X37),xbar(X38),xbar(X39),xbar(X40),xbar(X41),xbar(X42),xbar(X43),xbar(X44),xbar(X45),xbar(X46),xbar(X47),xbar(X48),xbar(X49),xbar(X50),xbar(X51),xbar(X52),xbar(X53),xbar(X54),xbar(X55),xbar(X56),xbar(X57),xbar(X58),xbar(X59),xbar(X60).
:- xbar(a).
"""
| 146.294118
| 592
| 0.670285
| 512
| 2,487
| 3.255859
| 0.144531
| 0.011998
| 0.009598
| 0.010798
| 0.993401
| 0.993401
| 0.993401
| 0.993401
| 0.993401
| 0.993401
| 0
| 0.182791
| 0.023321
| 2,487
| 16
| 593
| 155.4375
| 0.503499
| 0
| 0
| 0.857143
| 0
| 0.285714
| 0.987475
| 0.906667
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
40961fc216212ce2d336090e7a3e4570d912b8d2
| 133
|
py
|
Python
|
docopt_parser/__init__.py
|
philip-h-dye/docpt-parser
|
abb5ea6566dac4cb77851d0cff69ed4aca3b804b
|
[
"MIT"
] | 2
|
2021-05-01T02:34:05.000Z
|
2022-02-13T06:41:36.000Z
|
docopt_parser/__init__.py
|
philip-h-dye/docpt-parser
|
abb5ea6566dac4cb77851d0cff69ed4aca3b804b
|
[
"MIT"
] | null | null | null |
docopt_parser/__init__.py
|
philip-h-dye/docpt-parser
|
abb5ea6566dac4cb77851d0cff69ed4aca3b804b
|
[
"MIT"
] | null | null | null |
from .core import *
# from .simplify import *
from .pass1 import *
from .pass2 import *
from .pass3 import *
from .listview import *
| 19
| 25
| 0.714286
| 18
| 133
| 5.277778
| 0.444444
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 0.18797
| 133
| 6
| 26
| 22.166667
| 0.851852
| 0.172932
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.6
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
40c98b124315c898a996585aabb61d1b8f20d144
| 8,667
|
py
|
Python
|
tests/test_date_options.py
|
patrickdehoon/dagu
|
71b6e5168afcdb1a9cdf0b5d6dd446a728c784e1
|
[
"MIT"
] | null | null | null |
tests/test_date_options.py
|
patrickdehoon/dagu
|
71b6e5168afcdb1a9cdf0b5d6dd446a728c784e1
|
[
"MIT"
] | null | null | null |
tests/test_date_options.py
|
patrickdehoon/dagu
|
71b6e5168afcdb1a9cdf0b5d6dd446a728c784e1
|
[
"MIT"
] | null | null | null |
import unittest
import json
from os import path
from dagu.dagu_exec import DaguExec
class TestDaguDateOptions(unittest.TestCase):
    """
    The purpose of this unittest is to check if every date-option, for every length is being processed as expected.

    Every fixture below is a rendering of 1 Feb 1992 (day-first for the EU
    lists, month-first for the USA lists), so every conversion to
    ``%Y/%m/%d`` must yield ``'1992/02/01'``.  The eight near-identical
    test methods of the original were collapsed onto one private helper,
    and the leftover debug ``print(record)`` calls were removed.
    """

    def setUp(self):
        # dagu.json ships at the repository root, one level above tests/.
        self.file = open(path.abspath(path.join(path.dirname(__file__), '..', 'dagu.json')))
        self.config = json.load(self.file)
        self.datetimes_with_divider_eu = [
            "01-02-1992 01:00:00 am", "01-02-1992 1:00:00 am", "01-02-1992 1:00:00", "01-02-1992 01:00:00",
            "1-02-1992 01:00:00 am", "1-02-1992 1:00:00 am", "1-02-1992 1:00:00", "1-02-1992 01:00:00",
            "01-2-1992 01:00:00 am", "01-2-1992 1:00:00 am", "01-2-1992 1:00:00", "01-2-1992 01:00:00",
            "1-2-92 01:00:00 am", "1-2-92 1:00:00 am", "1-2-92 1:00:00", "1-2-92 01:00:00",
            "01-02-92 01:00:00 am", "01-02-92 1:00:00 am", "01-02-92 01:00:00", "01-02-92 1:00:00",
            "01-2-92 01:00:00 am", "01-2-92 1:00:00 am", "01-2-92 1:00:00", "01-2-92 01:00:00",
            "1-02-92 01:00:00 am", "1-02-92 1:00:00 am", "1-02-92 1:00:00", "1-02-92 01:00:00",
            "1-2-92 01:00:00 am", "1-2-92 1:00:00 am", "1-2-92 01:00:00", "1-2-92 1:00:00"]
        self.datetimes_with_divider_usa = [
            "02/01/1992 01:00:00 am", "02/01/1992 1:00:00 am", "02/01/1992 1:00:00", "02/01/1992 01:00:00",
            "2/01/1992 01:00:00 am", "2/01/1992 1:00:00 am", "2/01/1992 1:00:00", "2/01/1992 01:00:00",
            "02/1/1992 01:00:00 am", "02/1/1992 1:00:00 am", "02/1/1992 1:00:00", "02/1/1992 01:00:00",
            "2/1/92 01:00:00 am", "2/1/92 1:00:00 am", "2/1/92 1:00:00", "2/1/92 01:00:00",
            "02/01/92 01:00:00 am", "02/01/92 1:00:00 am", "02/01/92 01:00:00", "02/01/92 1:00:00",
            "02/1/92 01:00:00 am", "02/1/92 1:00:00 am", "02/1/92 1:00:00", "02/1/92 01:00:00",
            "2/01/92 01:00:00 am", "2/01/92 1:00:00 am", "2/01/92 1:00:00", "2/01/92 01:00:00",
            "2/1/92 01:00:00 am", "2/1/92 1:00:00 am", "2/1/92 01:00:00", "2/1/92 1:00:00"]
        self.datetimes_without_divider = [
            "01021992 01:00:00 am", "01021992 1:00:00 am", "01021992 1:00:00", "01021992 01:00:00",
            "1021992 01:00:00 am", "1021992 1:00:00 am", "1021992 1:00:00", "1021992 01:00:00",
            "0121992 01:00:00 am", "0121992 1:00:00 am", "0121992 1:00:00", "0121992 01:00:00",
            "1292 01:00:00 am", "1292 1:00:00 am", "1292 1:00:00", "1292 01:00:00",
            "010292 01:00:00 am", "010292 1:00:00 am", "010292 01:00:00", "010292 1:00:00",
            "01292 01:00:00 am", "01292 1:00:00 am", "01292 1:00:00", "01292 01:00:00",
            "10292 01:00:00 am", "10292 1:00:00 am", "10292 1:00:00", "10292 01:00:00",
            "1292 01:00:00 am", "1292 1:00:00 am", "1292 01:00:00", "1292 1:00:00"]
        self.datetimes_with_divider_space = [
            "01 02 1992 01:00:00 am", "01 02 1992 1:00:00 am", "01 02 1992 1:00:00", "01 02 1992 01:00:00",
            "1 02 1992 01:00:00 am", "1 02 1992 1:00:00 am", "1 02 1992 1:00:00", "1 02 1992 01:00:00",
            "01 2 1992 01:00:00 am", "01 2 1992 1:00:00 am", "01 2 1992 1:00:00", "01 2 1992 01:00:00",
            "1 2 92 01:00:00 am", "1 2 92 1:00:00 am", "1 2 92 1:00:00", "1 2 92 01:00:00",
            "01 02 92 01:00:00 am", "01 02 92 1:00:00 am", "01 02 92 01:00:00", "01 02 92 1:00:00",
            "01 2 92 01:00:00 am", "01 2 92 1:00:00 am", "01 2 92 1:00:00", "01 2 92 01:00:00",
            "1 02 92 01:00:00 am", "1 02 92 1:00:00 am", "1 02 92 1:00:00", "1 02 92 01:00:00",
            "1 2 92 01:00:00 am", "1 2 92 1:00:00 am", "1 2 92 01:00:00", "1 2 92 1:00:00"]
        self.dates_with_divider_eu = ["01-02-1992", "1-02-1992", "01-2-1992", "1-2-92", "01-02-92", "01-2-92", "1-02-92", "1-2-92"]
        self.dates_with_divider_usa = ["02/01/1992", "2/01/1992", "02/1/1992", "2/1/92", "02/01/92", "02/1/92", "2/01/92", "2/1/92"]
        self.dates_with_divider_space = ["01 02 1992", "1 02 1992", "01 2 1992", "1 2 92", "01 02 92", "01 2 92", "1 02 92", "1 2 92"]
        self.dates_without_divider = ["01021992", "1021992", "0121992", "1292", "010292", "01292", "10292", "1292"]
        self.target_format = '%Y/%m/%d'

    def _assert_all_convert(self, records):
        """Run every record through DaguExec and expect '1992/02/01'."""
        dagu_exec = DaguExec(dagu_config=self.config)
        for record in records:
            dagu_exec.execute(record=record, target_format=self.target_format)
            result = dagu_exec.get_record()
            self.assertEqual(result, '1992/02/01')

    def test_datetimes_without_divider(self):
        self._assert_all_convert(self.datetimes_without_divider)

    def test_datetimes_with_divider_space(self):
        self._assert_all_convert(self.datetimes_with_divider_space)

    def test_datetimes_with_divider_usa(self):
        self._assert_all_convert(self.datetimes_with_divider_usa)

    def test_datetimes_with_divider_eu(self):
        self._assert_all_convert(self.datetimes_with_divider_eu)

    def test_dates_with_divider_eu(self):
        self._assert_all_convert(self.dates_with_divider_eu)

    def test_dates_with_divider_usa(self):
        self._assert_all_convert(self.dates_with_divider_usa)

    def test_dates_with_divider_space(self):
        self._assert_all_convert(self.dates_with_divider_space)

    def test_dates_without_divider(self):
        self._assert_all_convert(self.dates_without_divider)

    def tearDown(self):
        # Close the config file handle opened in setUp.
        self.file.close()
# Allow running this test module directly: `python test_date_options.py`.
if __name__ == '__main__':
    unittest.main()
| 60.1875
| 138
| 0.490712
| 1,322
| 8,667
| 3.108169
| 0.059758
| 0.124605
| 0.093453
| 0.062302
| 0.863957
| 0.856899
| 0.801655
| 0.736189
| 0.720613
| 0.684595
| 0
| 0.307692
| 0.367024
| 8,667
| 143
| 139
| 60.608392
| 0.441305
| 0.012807
| 0
| 0.585366
| 0
| 0
| 0.298313
| 0
| 0
| 0
| 0
| 0
| 0.065041
| 1
| 0.081301
| false
| 0
| 0.03252
| 0
| 0.121951
| 0.01626
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
907f699961f198e7eae4bff26bba52846a4cfcb7
| 32,223
|
py
|
Python
|
sdk/python/pulumi_gcp/bigtable/instance.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/bigtable/instance.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/bigtable/instance.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['InstanceArgs', 'Instance']
@pulumi.input_type
class InstanceArgs:
def __init__(__self__, *,
clusters: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceClusterArgs']]]] = None,
deletion_protection: Optional[pulumi.Input[bool]] = None,
display_name: Optional[pulumi.Input[str]] = None,
instance_type: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Instance resource.
:param pulumi.Input[Sequence[pulumi.Input['InstanceClusterArgs']]] clusters: A block of cluster configuration options. This can be specified at least once, and up to 4 times.
See structure below.
:param pulumi.Input[bool] deletion_protection: Whether or not to allow this provider to destroy the instance. Unless this field is set to false
in the statefile, a `pulumi destroy` or `pulumi up` that would delete the instance will fail.
:param pulumi.Input[str] display_name: The human-readable display name of the Bigtable instance. Defaults to the instance `name`.
:param pulumi.Input[str] instance_type: The instance type to create. One of `"DEVELOPMENT"` or `"PRODUCTION"`. Defaults to `"PRODUCTION"`.
It is recommended to leave this field unspecified since the distinction between `"DEVELOPMENT"` and `"PRODUCTION"` instances is going away,
and all instances will become `"PRODUCTION"` instances. This means that new and existing `"DEVELOPMENT"` instances will be converted to
`"PRODUCTION"` instances. It is recommended for users to use `"PRODUCTION"` instances in any case, since a 1-node `"PRODUCTION"` instance
is functionally identical to a `"DEVELOPMENT"` instance, but without the accompanying restrictions.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: A set of key/value label pairs to assign to the resource. Label keys must follow the requirements at https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements.
:param pulumi.Input[str] name: The name (also called Instance Id in the Cloud Console) of the Cloud Bigtable instance.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it
is not provided, the provider project is used.
"""
if clusters is not None:
pulumi.set(__self__, "clusters", clusters)
if deletion_protection is not None:
pulumi.set(__self__, "deletion_protection", deletion_protection)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if instance_type is not None:
warnings.warn("""It is recommended to leave this field unspecified since the distinction between \"DEVELOPMENT\" and \"PRODUCTION\" instances is going away, and all instances will become \"PRODUCTION\" instances. This means that new and existing \"DEVELOPMENT\" instances will be converted to \"PRODUCTION\" instances. It is recommended for users to use \"PRODUCTION\" instances in any case, since a 1-node \"PRODUCTION\" instance is functionally identical to a \"DEVELOPMENT\" instance, but without the accompanying restrictions.""", DeprecationWarning)
pulumi.log.warn("""instance_type is deprecated: It is recommended to leave this field unspecified since the distinction between \"DEVELOPMENT\" and \"PRODUCTION\" instances is going away, and all instances will become \"PRODUCTION\" instances. This means that new and existing \"DEVELOPMENT\" instances will be converted to \"PRODUCTION\" instances. It is recommended for users to use \"PRODUCTION\" instances in any case, since a 1-node \"PRODUCTION\" instance is functionally identical to a \"DEVELOPMENT\" instance, but without the accompanying restrictions.""")
if instance_type is not None:
pulumi.set(__self__, "instance_type", instance_type)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
@property
@pulumi.getter
def clusters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceClusterArgs']]]]:
"""
A block of cluster configuration options. This can be specified at least once, and up to 4 times.
See structure below.
"""
return pulumi.get(self, "clusters")
@clusters.setter
def clusters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceClusterArgs']]]]):
pulumi.set(self, "clusters", value)
@property
@pulumi.getter(name="deletionProtection")
def deletion_protection(self) -> Optional[pulumi.Input[bool]]:
"""
Whether or not to allow this provider to destroy the instance. Unless this field is set to false
in the statefile, a `pulumi destroy` or `pulumi up` that would delete the instance will fail.
"""
return pulumi.get(self, "deletion_protection")
@deletion_protection.setter
def deletion_protection(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "deletion_protection", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
The human-readable display name of the Bigtable instance. Defaults to the instance `name`.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> Optional[pulumi.Input[str]]:
"""
The instance type to create. One of `"DEVELOPMENT"` or `"PRODUCTION"`. Defaults to `"PRODUCTION"`.
It is recommended to leave this field unspecified since the distinction between `"DEVELOPMENT"` and `"PRODUCTION"` instances is going away,
and all instances will become `"PRODUCTION"` instances. This means that new and existing `"DEVELOPMENT"` instances will be converted to
`"PRODUCTION"` instances. It is recommended for users to use `"PRODUCTION"` instances in any case, since a 1-node `"PRODUCTION"` instance
is functionally identical to a `"DEVELOPMENT"` instance, but without the accompanying restrictions.
"""
return pulumi.get(self, "instance_type")
@instance_type.setter
def instance_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_type", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
    """
    A set of key/value label pairs to assign to the resource. Label keys must follow the requirements at https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements.
    """
    return pulumi.get(self, "labels")

@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
    pulumi.set(self, "labels", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
    """
    The name (also called Instance Id in the Cloud Console) of the Cloud Bigtable instance.
    """
    return pulumi.get(self, "name")

@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
    """
    The ID of the project in which the resource belongs. If it
    is not provided, the provider project is used.
    """
    return pulumi.get(self, "project")

@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "project", value)
@pulumi.input_type
class _InstanceState:
    # Auto-generated Pulumi state type: holds the *output* state of an existing
    # Instance for lookups/refresh (see Instance.get); all fields are optional.
    def __init__(__self__, *,
                 clusters: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceClusterArgs']]]] = None,
                 deletion_protection: Optional[pulumi.Input[bool]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Instance resources.
        :param pulumi.Input[Sequence[pulumi.Input['InstanceClusterArgs']]] clusters: A block of cluster configuration options. This can be specified at least once, and up to 4 times.
               See structure below.
        :param pulumi.Input[bool] deletion_protection: Whether or not to allow this provider to destroy the instance. Unless this field is set to false
               in the statefile, a `pulumi destroy` or `pulumi up` that would delete the instance will fail.
        :param pulumi.Input[str] display_name: The human-readable display name of the Bigtable instance. Defaults to the instance `name`.
        :param pulumi.Input[str] instance_type: The instance type to create. One of `"DEVELOPMENT"` or `"PRODUCTION"`. Defaults to `"PRODUCTION"`.
               It is recommended to leave this field unspecified since the distinction between `"DEVELOPMENT"` and `"PRODUCTION"` instances is going away,
               and all instances will become `"PRODUCTION"` instances. This means that new and existing `"DEVELOPMENT"` instances will be converted to
               `"PRODUCTION"` instances. It is recommended for users to use `"PRODUCTION"` instances in any case, since a 1-node `"PRODUCTION"` instance
               is functionally identical to a `"DEVELOPMENT"` instance, but without the accompanying restrictions.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: A set of key/value label pairs to assign to the resource. Label keys must follow the requirements at https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements.
        :param pulumi.Input[str] name: The name (also called Instance Id in the Cloud Console) of the Cloud Bigtable instance.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it
               is not provided, the provider project is used.
        """
        if clusters is not None:
            pulumi.set(__self__, "clusters", clusters)
        if deletion_protection is not None:
            pulumi.set(__self__, "deletion_protection", deletion_protection)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        # Deprecated field: warn first, then store the value below (codegen emits
        # two separate `is not None` checks on purpose).
        if instance_type is not None:
            warnings.warn("""It is recommended to leave this field unspecified since the distinction between \"DEVELOPMENT\" and \"PRODUCTION\" instances is going away, and all instances will become \"PRODUCTION\" instances. This means that new and existing \"DEVELOPMENT\" instances will be converted to \"PRODUCTION\" instances. It is recommended for users to use \"PRODUCTION\" instances in any case, since a 1-node \"PRODUCTION\" instance is functionally identical to a \"DEVELOPMENT\" instance, but without the accompanying restrictions.""", DeprecationWarning)
            pulumi.log.warn("""instance_type is deprecated: It is recommended to leave this field unspecified since the distinction between \"DEVELOPMENT\" and \"PRODUCTION\" instances is going away, and all instances will become \"PRODUCTION\" instances. This means that new and existing \"DEVELOPMENT\" instances will be converted to \"PRODUCTION\" instances. It is recommended for users to use \"PRODUCTION\" instances in any case, since a 1-node \"PRODUCTION\" instance is functionally identical to a \"DEVELOPMENT\" instance, but without the accompanying restrictions.""")
        if instance_type is not None:
            pulumi.set(__self__, "instance_type", instance_type)
        if labels is not None:
            pulumi.set(__self__, "labels", labels)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if project is not None:
            pulumi.set(__self__, "project", project)

    @property
    @pulumi.getter
    def clusters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceClusterArgs']]]]:
        """
        A block of cluster configuration options. This can be specified at least once, and up to 4 times.
        See structure below.
        """
        return pulumi.get(self, "clusters")

    @clusters.setter
    def clusters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceClusterArgs']]]]):
        pulumi.set(self, "clusters", value)

    @property
    @pulumi.getter(name="deletionProtection")
    def deletion_protection(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether or not to allow this provider to destroy the instance. Unless this field is set to false
        in the statefile, a `pulumi destroy` or `pulumi up` that would delete the instance will fail.
        """
        return pulumi.get(self, "deletion_protection")

    @deletion_protection.setter
    def deletion_protection(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "deletion_protection", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        The human-readable display name of the Bigtable instance. Defaults to the instance `name`.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="instanceType")
    def instance_type(self) -> Optional[pulumi.Input[str]]:
        """
        The instance type to create. One of `"DEVELOPMENT"` or `"PRODUCTION"`. Defaults to `"PRODUCTION"`.
        It is recommended to leave this field unspecified since the distinction between `"DEVELOPMENT"` and `"PRODUCTION"` instances is going away,
        and all instances will become `"PRODUCTION"` instances. This means that new and existing `"DEVELOPMENT"` instances will be converted to
        `"PRODUCTION"` instances. It is recommended for users to use `"PRODUCTION"` instances in any case, since a 1-node `"PRODUCTION"` instance
        is functionally identical to a `"DEVELOPMENT"` instance, but without the accompanying restrictions.
        """
        return pulumi.get(self, "instance_type")

    @instance_type.setter
    def instance_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_type", value)

    @property
    @pulumi.getter
    def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A set of key/value label pairs to assign to the resource. Label keys must follow the requirements at https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements.
        """
        return pulumi.get(self, "labels")

    @labels.setter
    def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "labels", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name (also called Instance Id in the Cloud Console) of the Cloud Bigtable instance.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs. If it
        is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)
class Instance(pulumi.CustomResource):
    # Auto-generated Pulumi resource for a Google Cloud Bigtable instance.
    # The two @overload __init__ stubs exist only for type checkers; the real
    # __init__ dispatches to _internal_init based on the argument shapes.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 clusters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceClusterArgs']]]]] = None,
                 deletion_protection: Optional[pulumi.Input[bool]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Creates a Google Bigtable instance. For more information see:
        * [API documentation](https://cloud.google.com/bigtable/docs/reference/admin/rest/v2/projects.instances.clusters)
        * How-to Guides
            * [Official Documentation](https://cloud.google.com/bigtable/docs)
        ## Example Usage
        ### Production Instance
        ```python
        import pulumi
        import pulumi_gcp as gcp
        production_instance = gcp.bigtable.Instance("production-instance",
            clusters=[gcp.bigtable.InstanceClusterArgs(
                cluster_id="tf-instance-cluster",
                num_nodes=1,
                storage_type="HDD",
            )],
            labels={
                "my-label": "prod-label",
            })
        ```
        ## Import
        Bigtable Instances can be imported using any of these accepted formats
        ```sh
        $ pulumi import gcp:bigtable/instance:Instance default projects/{{project}}/instances/{{name}}
        ```
        ```sh
        $ pulumi import gcp:bigtable/instance:Instance default {{project}}/{{name}}
        ```
        ```sh
        $ pulumi import gcp:bigtable/instance:Instance default {{name}}
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceClusterArgs']]]] clusters: A block of cluster configuration options. This can be specified at least once, and up to 4 times.
               See structure below.
        :param pulumi.Input[bool] deletion_protection: Whether or not to allow this provider to destroy the instance. Unless this field is set to false
               in the statefile, a `pulumi destroy` or `pulumi up` that would delete the instance will fail.
        :param pulumi.Input[str] display_name: The human-readable display name of the Bigtable instance. Defaults to the instance `name`.
        :param pulumi.Input[str] instance_type: The instance type to create. One of `"DEVELOPMENT"` or `"PRODUCTION"`. Defaults to `"PRODUCTION"`.
               It is recommended to leave this field unspecified since the distinction between `"DEVELOPMENT"` and `"PRODUCTION"` instances is going away,
               and all instances will become `"PRODUCTION"` instances. This means that new and existing `"DEVELOPMENT"` instances will be converted to
               `"PRODUCTION"` instances. It is recommended for users to use `"PRODUCTION"` instances in any case, since a 1-node `"PRODUCTION"` instance
               is functionally identical to a `"DEVELOPMENT"` instance, but without the accompanying restrictions.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: A set of key/value label pairs to assign to the resource. Label keys must follow the requirements at https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements.
        :param pulumi.Input[str] name: The name (also called Instance Id in the Cloud Console) of the Cloud Bigtable instance.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it
               is not provided, the provider project is used.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: Optional[InstanceArgs] = None,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Creates a Google Bigtable instance. For more information see:
        * [API documentation](https://cloud.google.com/bigtable/docs/reference/admin/rest/v2/projects.instances.clusters)
        * How-to Guides
            * [Official Documentation](https://cloud.google.com/bigtable/docs)
        ## Example Usage
        ### Production Instance
        ```python
        import pulumi
        import pulumi_gcp as gcp
        production_instance = gcp.bigtable.Instance("production-instance",
            clusters=[gcp.bigtable.InstanceClusterArgs(
                cluster_id="tf-instance-cluster",
                num_nodes=1,
                storage_type="HDD",
            )],
            labels={
                "my-label": "prod-label",
            })
        ```
        ## Import
        Bigtable Instances can be imported using any of these accepted formats
        ```sh
        $ pulumi import gcp:bigtable/instance:Instance default projects/{{project}}/instances/{{name}}
        ```
        ```sh
        $ pulumi import gcp:bigtable/instance:Instance default {{project}}/{{name}}
        ```
        ```sh
        $ pulumi import gcp:bigtable/instance:Instance default {{name}}
        ```
        :param str resource_name: The name of the resource.
        :param InstanceArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: accept either an
        # InstanceArgs object or individual keyword properties.
        resource_args, opts = _utilities.get_resource_args_opts(InstanceArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       clusters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceClusterArgs']]]]] = None,
                       deletion_protection: Optional[pulumi.Input[bool]] = None,
                       display_name: Optional[pulumi.Input[str]] = None,
                       instance_type: Optional[pulumi.Input[str]] = None,
                       labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       project: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Actual constructor body: validates options, builds the props bag and
        # registers the resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ may only be supplied together
            # with opts.id (the "get existing resource" path).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = InstanceArgs.__new__(InstanceArgs)
            __props__.__dict__["clusters"] = clusters
            __props__.__dict__["deletion_protection"] = deletion_protection
            __props__.__dict__["display_name"] = display_name
            # Deprecated field: only warn for brand-new resources (not when
            # rehydrating from an existing URN).
            if instance_type is not None and not opts.urn:
                warnings.warn("""It is recommended to leave this field unspecified since the distinction between \"DEVELOPMENT\" and \"PRODUCTION\" instances is going away, and all instances will become \"PRODUCTION\" instances. This means that new and existing \"DEVELOPMENT\" instances will be converted to \"PRODUCTION\" instances. It is recommended for users to use \"PRODUCTION\" instances in any case, since a 1-node \"PRODUCTION\" instance is functionally identical to a \"DEVELOPMENT\" instance, but without the accompanying restrictions.""", DeprecationWarning)
                pulumi.log.warn("""instance_type is deprecated: It is recommended to leave this field unspecified since the distinction between \"DEVELOPMENT\" and \"PRODUCTION\" instances is going away, and all instances will become \"PRODUCTION\" instances. This means that new and existing \"DEVELOPMENT\" instances will be converted to \"PRODUCTION\" instances. It is recommended for users to use \"PRODUCTION\" instances in any case, since a 1-node \"PRODUCTION\" instance is functionally identical to a \"DEVELOPMENT\" instance, but without the accompanying restrictions.""")
            __props__.__dict__["instance_type"] = instance_type
            __props__.__dict__["labels"] = labels
            __props__.__dict__["name"] = name
            __props__.__dict__["project"] = project
        super(Instance, __self__).__init__(
            'gcp:bigtable/instance:Instance',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            clusters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceClusterArgs']]]]] = None,
            deletion_protection: Optional[pulumi.Input[bool]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            instance_type: Optional[pulumi.Input[str]] = None,
            labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            name: Optional[pulumi.Input[str]] = None,
            project: Optional[pulumi.Input[str]] = None) -> 'Instance':
        """
        Get an existing Instance resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceClusterArgs']]]] clusters: A block of cluster configuration options. This can be specified at least once, and up to 4 times.
               See structure below.
        :param pulumi.Input[bool] deletion_protection: Whether or not to allow this provider to destroy the instance. Unless this field is set to false
               in the statefile, a `pulumi destroy` or `pulumi up` that would delete the instance will fail.
        :param pulumi.Input[str] display_name: The human-readable display name of the Bigtable instance. Defaults to the instance `name`.
        :param pulumi.Input[str] instance_type: The instance type to create. One of `"DEVELOPMENT"` or `"PRODUCTION"`. Defaults to `"PRODUCTION"`.
               It is recommended to leave this field unspecified since the distinction between `"DEVELOPMENT"` and `"PRODUCTION"` instances is going away,
               and all instances will become `"PRODUCTION"` instances. This means that new and existing `"DEVELOPMENT"` instances will be converted to
               `"PRODUCTION"` instances. It is recommended for users to use `"PRODUCTION"` instances in any case, since a 1-node `"PRODUCTION"` instance
               is functionally identical to a `"DEVELOPMENT"` instance, but without the accompanying restrictions.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: A set of key/value label pairs to assign to the resource. Label keys must follow the requirements at https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements.
        :param pulumi.Input[str] name: The name (also called Instance Id in the Cloud Console) of the Cloud Bigtable instance.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it
               is not provided, the provider project is used.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _InstanceState.__new__(_InstanceState)
        __props__.__dict__["clusters"] = clusters
        __props__.__dict__["deletion_protection"] = deletion_protection
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["instance_type"] = instance_type
        __props__.__dict__["labels"] = labels
        __props__.__dict__["name"] = name
        __props__.__dict__["project"] = project
        return Instance(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def clusters(self) -> pulumi.Output[Sequence['outputs.InstanceCluster']]:
        """
        A block of cluster configuration options. This can be specified at least once, and up to 4 times.
        See structure below.
        """
        return pulumi.get(self, "clusters")

    @property
    @pulumi.getter(name="deletionProtection")
    def deletion_protection(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether or not to allow this provider to destroy the instance. Unless this field is set to false
        in the statefile, a `pulumi destroy` or `pulumi up` that would delete the instance will fail.
        """
        return pulumi.get(self, "deletion_protection")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """
        The human-readable display name of the Bigtable instance. Defaults to the instance `name`.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="instanceType")
    def instance_type(self) -> pulumi.Output[Optional[str]]:
        """
        The instance type to create. One of `"DEVELOPMENT"` or `"PRODUCTION"`. Defaults to `"PRODUCTION"`.
        It is recommended to leave this field unspecified since the distinction between `"DEVELOPMENT"` and `"PRODUCTION"` instances is going away,
        and all instances will become `"PRODUCTION"` instances. This means that new and existing `"DEVELOPMENT"` instances will be converted to
        `"PRODUCTION"` instances. It is recommended for users to use `"PRODUCTION"` instances in any case, since a 1-node `"PRODUCTION"` instance
        is functionally identical to a `"DEVELOPMENT"` instance, but without the accompanying restrictions.
        """
        return pulumi.get(self, "instance_type")

    @property
    @pulumi.getter
    def labels(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A set of key/value label pairs to assign to the resource. Label keys must follow the requirements at https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements.
        """
        return pulumi.get(self, "labels")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name (also called Instance Id in the Cloud Console) of the Cloud Bigtable instance.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The ID of the project in which the resource belongs. If it
        is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")
| 56.931095
| 581
| 0.670142
| 3,910
| 32,223
| 5.414578
| 0.063427
| 0.062869
| 0.044306
| 0.03741
| 0.921213
| 0.915356
| 0.907562
| 0.903453
| 0.901044
| 0.892778
| 0
| 0.00101
| 0.231667
| 32,223
| 565
| 582
| 57.031858
| 0.854108
| 0.44192
| 0
| 0.815498
| 1
| 0.02214
| 0.266067
| 0.003266
| 0
| 0
| 0
| 0
| 0
| 1
| 0.154982
| false
| 0.00369
| 0.02583
| 0
| 0.273063
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
908c029390d37434d910eedd293c7a6d937f9e54
| 74
|
py
|
Python
|
python_demo/numpy_demo.py
|
MatrixAI/Python-Demo
|
688d36344f6ae303d90faf55165b5fec6c47446c
|
[
"Apache-2.0"
] | null | null | null |
python_demo/numpy_demo.py
|
MatrixAI/Python-Demo
|
688d36344f6ae303d90faf55165b5fec6c47446c
|
[
"Apache-2.0"
] | 8
|
2019-12-20T02:45:05.000Z
|
2021-09-06T07:19:54.000Z
|
python_demo/numpy_demo.py
|
MatrixAI/Python-Demo
|
688d36344f6ae303d90faf55165b5fec6c47446c
|
[
"Apache-2.0"
] | 1
|
2020-02-21T13:36:22.000Z
|
2020-02-21T13:36:22.000Z
|
import numpy as np
def give_matrix():
    """Return a fixed 1-D NumPy array holding the integers 1 through 4."""
    values = [1, 2, 3, 4]
    return np.array(values)
| 12.333333
| 33
| 0.621622
| 14
| 74
| 3.214286
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 0.22973
| 74
| 5
| 34
| 14.8
| 0.719298
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
90ab0d1e479a73297f1ef5a37ea6000ae6917a3d
| 16,454
|
py
|
Python
|
poda/layers/convolutional.py
|
gideonmanurung/poda
|
0a64cfa474f82acb891454141bc537d81bc77092
|
[
"MIT"
] | null | null | null |
poda/layers/convolutional.py
|
gideonmanurung/poda
|
0a64cfa474f82acb891454141bc537d81bc77092
|
[
"MIT"
] | 4
|
2020-09-26T01:08:59.000Z
|
2022-02-10T01:40:42.000Z
|
poda/layers/convolutional.py
|
gideonmanurung/poda
|
0a64cfa474f82acb891454141bc537d81bc77092
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
from poda.layers.activation import *
from poda.layers.dense import *
from poda.layers.regularizer import *
def avarage_pool_1d(input_tensor, kernel_sizes=(3), stride_sizes=(1), paddings='same', names=None):
    """Average-pool a rank-3 (batch, width, channels) tensor along its width axis.

    Arguments:
        input_tensor {Tensor} -- [A Tensor representing the previous layer]
    Keyword Arguments:
        kernel_sizes {int or tuple} -- [Size of the pooling window] (default: {(3)})
        stride_sizes {int or tuple} -- [Stride of the pooling window] (default: {(1)})
        paddings {str} -- [Padding algorithm, 'same' or 'valid'] (default: {'same'})
        names {str} -- [Name of the layer] (default: {None})
    Returns:
        [Tensor] -- [A layer with average pool 1D with dtype tf.float32]
    """
    # Idiomatic None check; fall back to a default layer name.
    if names is not None:
        names = str(names)
    else:
        names = 'avg_pool_1d'
    # Normalize the padding keyword; anything that is not 'valid' is treated
    # as 'SAME', preserving the original fall-through behavior.
    paddings = 'VALID' if str(paddings).lower() == 'valid' else 'SAME'
    # Fix: tf.nn.avg_pool1d takes `input` (not `value`), and its 1-D data
    # format is 'NWC' — the original 'NHWC' is rejected for 1-D pooling.
    layer = tf.nn.avg_pool1d(input=input_tensor, ksize=kernel_sizes, strides=stride_sizes, padding=paddings, data_format='NWC', name=names)
    return layer
def avarage_pool_2d(input_tensor, kernel_sizes=(3,3), stride_sizes=(1,1), paddings='same', names=None):
    """Average-pool a rank-4 (batch, height, width, channels) tensor spatially.

    Arguments:
        input_tensor {Tensor} -- [A Tensor representing the previous layer]
    Keyword Arguments:
        kernel_sizes {tuple} -- [Size of the pooling window] (default: {(3,3)})
        stride_sizes {tuple} -- [Stride of the pooling window] (default: {(1,1)})
        paddings {str} -- [Padding algorithm, 'same' or 'valid'] (default: {'same'})
        names {str} -- [Name of the layer] (default: {None})
    Returns:
        [Tensor] -- [A layer with average pool 2D with dtype tf.float32]
    """
    if names is not None:
        names = str(names)
    else:
        names = 'avg_pool_2d'
    # Anything that is not 'valid' falls through to 'SAME' (original behavior).
    paddings = 'VALID' if str(paddings).lower() == 'valid' else 'SAME'
    # Fix: tf.nn.avg_pool2d takes `input`, not `value` (the `value` keyword
    # belonged to the legacy tf.nn.avg_pool and raises a TypeError here).
    layer = tf.nn.avg_pool2d(input=input_tensor, ksize=kernel_sizes, strides=stride_sizes, padding=paddings, data_format='NHWC', name=names)
    return layer
def avarage_pool_3d(input_tensor, kernel_sizes=(3,3,3), stride_sizes=(1,1,1), paddings='same', names=None):
    """Average-pool a rank-5 (batch, depth, height, width, channels) tensor.

    Arguments:
        input_tensor {Tensor} -- [A Tensor representing the previous layer]
    Keyword Arguments:
        kernel_sizes {tuple} -- [Size of the pooling window] (default: {(3,3,3)})
        stride_sizes {tuple} -- [Stride of the pooling window] (default: {(1,1,1)})
        paddings {str} -- [Padding algorithm, 'same' or 'valid'] (default: {'same'})
        names {str} -- [Name of the layer] (default: {None})
    Returns:
        [Tensor] -- [A layer with average pool 3D with dtype tf.float32]
    """
    if names is not None:
        names = str(names)
    else:
        names = 'avg_pool_3d'
    paddings = 'VALID' if str(paddings).lower() == 'valid' else 'SAME'
    # Fix: tf.nn.avg_pool3d takes `input` (not `value`), and the 3-D data
    # format is 'NDHWC' — the original 'NHWC' is invalid for 5-D inputs.
    layer = tf.nn.avg_pool3d(input=input_tensor, ksize=kernel_sizes, strides=stride_sizes, padding=paddings, data_format='NDHWC', name=names)
    return layer
def batch_normalization(input_tensor, is_trainable=True):
    """Apply a Keras BatchNormalization layer to the given tensor.

    Arguments:
        input_tensor {Tensor} -- [A Tensor representing the previous layer]
    Keyword Arguments:
        is_trainable {bool} -- [Whether the layer runs in training mode] (default: {True})
    Returns:
        [Tensor] -- [The batch-normalized tensor]
    """
    normalizer = tf.compat.v1.keras.layers.BatchNormalization()
    return normalizer(inputs=input_tensor, training=is_trainable)
def convolution_1d(input_tensor, number_filters, kernel_sizes=3, stride_sizes=(1,1), paddings='same', activations='relu', batch_normalizations=False, dropout_rates=None, names=None):
    """1-D convolution with optional activation, batch norm and dropout.

    Arguments:
        input_tensor {Tensor} -- [A Tensor representing the previous layer's values]
        number_filters {int} -- [The dimensionality of the output space (number of filters)]
    Keyword Arguments:
        kernel_sizes {int} -- [Size of kernel] (default: {3})
        stride_sizes {tuple} -- [Stride of the kernel; only the first entry is used] (default: {(1,1)})
        paddings {str} -- [Padding algorithm, 'same' or 'valid'] (default: {'same'})
        activations {str} -- [Type of activation function, or None to skip] (default: {'relu'})
        batch_normalizations {bool} -- [Whether to apply batch normalization] (default: {False})
        dropout_rates {float} -- [Dropout rate, or None to skip dropout] (default: {None})
        names {str} -- [Name of the layer] (default: {None})
    Returns:
        [Tensor] -- [Layer convolution 1D with dtype tf.float32]
    """
    if names is not None:
        names = str(names)
    else:
        names = 'conv_1d'
    paddings = 'VALID' if str(paddings).lower() == 'valid' else 'SAME'
    weight = new_weights(shapes=[kernel_sizes, input_tensor.get_shape().as_list()[-1], number_filters], names=names, dtypes=tf.float32)
    # Fix: current tf.nn.conv1d takes `input`/`filters`; the legacy `value=`
    # and `use_cudnn_on_gpu=` keywords were removed and raise a TypeError.
    layer = tf.nn.conv1d(input=input_tensor, filters=weight, stride=stride_sizes[0], padding=paddings, data_format='NWC', name='conv_1d_'+names)
    # No-op `else: layer = layer` branches of the original removed.
    if activations is not None:
        layer = define_activation_function(input_tensor=layer, activation_name=activations, names=names)
    if batch_normalizations:
        layer = batch_normalization(input_tensor=layer, is_trainable=batch_normalizations)
    if dropout_rates is not None:
        layer = dropout(input_tensor=layer, dropout_rates=dropout_rates, names=names)
    return layer
def convolution_2d(input_tensor, number_filters, kernel_sizes=(3,3), stride_sizes=(1,1), paddings='same', activations='relu', batch_normalizations=False, dropout_rates=None, names=None):
    """2-D convolution (cross-correlation) with optional activation, batch norm and dropout.

    Arguments:
        input_tensor {Tensor} -- [A Tensor representing the previous layer's values]
        number_filters {int} -- [The dimensionality of the output space (number of filters)]
    Keyword Arguments:
        kernel_sizes {tuple} -- [Size of kernel] (default: {(3,3)})
        stride_sizes {tuple} -- [Stride of the kernel] (default: {(1,1)})
        paddings {str} -- [Padding algorithm, 'same' or 'valid'] (default: {'same'})
        activations {str} -- [Type of activation function, or None to skip] (default: {'relu'})
        batch_normalizations {bool} -- [Whether to apply batch normalization] (default: {False})
        dropout_rates {float} -- [Dropout rate, or None to skip dropout] (default: {None})
        names {str} -- [Name of the layer] (default: {None})
    Returns:
        [Tensor] -- [Layer convolution 2D with dtype tf.float32]
    """
    if names is not None:
        names = str(names)
    else:
        names = 'conv_2d'
    paddings = 'VALID' if str(paddings).lower() == 'valid' else 'SAME'
    weight = new_weights(shapes=[kernel_sizes[0], kernel_sizes[1], input_tensor.get_shape().as_list()[-1], number_filters], names=names, dtypes=tf.float32)
    # Fix: current tf.nn.conv2d takes `filters` (not `filter`) and has no
    # `use_cudnn_on_gpu` argument; a length-2 strides list is accepted.
    layer = tf.nn.conv2d(input=input_tensor, filters=weight, strides=[stride_sizes[0], stride_sizes[1]], padding=paddings,
                         data_format='NHWC', dilations=[1, 1, 1, 1], name='conv_2d_'+names)
    if activations is not None:
        # Fix: the keyword is `activation_name` (as in every sibling function);
        # the original `activation_names=` raised a TypeError.
        layer = define_activation_function(input_tensor=layer, activation_name=activations, names=names)
    if batch_normalizations:
        layer = batch_normalization(input_tensor=layer, is_trainable=batch_normalizations)
    if dropout_rates is not None:
        layer = dropout(input_tensor=layer, dropout_rates=dropout_rates, names=names)
    return layer
def convolution_3d(input_tensor, number_filters, kernel_sizes=(3,3,3), stride_sizes=(1,1), paddings='same', activations='relu', batch_normalizations=False, dropout_rates=None, names=None):
    """3-D convolution with optional activation, batch norm and dropout.

    Arguments:
        input_tensor {Tensor} -- [A rank-5 Tensor representing the previous layer's values]
        number_filters {int} -- [The dimensionality of the output space (number of filters)]
    Keyword Arguments:
        kernel_sizes {tuple} -- [Depth/height/width of the kernel] (default: {(3,3,3)})
        stride_sizes {tuple} -- [Stride of the kernel] (default: {(1,1)})
        paddings {str} -- [Padding algorithm, 'same' or 'valid'] (default: {'same'})
        activations {str} -- [Type of activation function, or None to skip] (default: {'relu'})
        batch_normalizations {bool} -- [Whether to apply batch normalization] (default: {False})
        dropout_rates {float} -- [Dropout rate, or None to skip dropout] (default: {None})
        names {str} -- [Name of the layer] (default: {None})
    Returns:
        [Tensor] -- [Layer convolution 3D with dtype tf.float32]
    """
    if names is not None:
        names = str(names)
    else:
        names = 'conv_3d'
    paddings = 'VALID' if str(paddings).lower() == 'valid' else 'SAME'
    # Fix: the third kernel dimension must be kernel_sizes[2]; the original
    # repeated kernel_sizes[1].
    weight = new_weights(shapes=[kernel_sizes[0], kernel_sizes[1], kernel_sizes[2], input_tensor.get_shape().as_list()[-1], number_filters], names=names, dtypes=tf.float32)
    # Fix: the original passed the Python builtins `input` and `filter` to
    # tf.nn.conv3d instead of the actual tensor and weight, and built a
    # malformed strides list that embedded the whole stride_sizes tuple.
    # conv3d requires a length-5 strides list [1, d, h, w, 1]; stride_sizes
    # defaults to a 2-tuple, so the last entry is reused for the width stride.
    layer = tf.nn.conv3d(input=input_tensor, filters=weight,
                         strides=[1, stride_sizes[0], stride_sizes[1], stride_sizes[-1], 1],
                         padding=paddings, data_format='NDHWC', dilations=[1, 1, 1, 1, 1], name='conv_3d_'+names)
    if activations is not None:
        layer = define_activation_function(input_tensor=layer, activation_name=activations, names=names)
    if batch_normalizations:
        layer = batch_normalization(input_tensor=layer, is_trainable=batch_normalizations)
    if dropout_rates is not None:
        layer = dropout(input_tensor=layer, dropout_rates=dropout_rates, names=names)
    return layer
def depthwise_convolution_2d(input_tensor, number_filters=1, kernel_sizes=(3,3), stride_sizes=(1,1), paddings='same', activations='relu', batch_normalizations=False, dropout_rates=None, names=None):
    """[Function for adding depthwise convolution 2D layer]
    Arguments:
        input_tensor {[float, double, int32, int64, uint8, int16, or int8]} -- [A Tensor representing prelayer values]
        number_filters {[int]} -- [the multiplier dimensionality of the output space (i.e. the number of filters in the convolution).]
    Keyword Arguments:
        kernel_sizes {tuple} -- [Size of kernel] (default: {(3,3)})
        stride_sizes {tuple} -- [Size of striding of kernel] (default: {(1,1)})
        paddings {str} -- [Indicating the type of padding algorithm to use] (default: {'same'})
        activations {str} -- [Type of activation function in layer] (default: {'relu'})
        batch_normalizations {bool} -- [Whether to apply batch normalization after the convolution] (default: {False})
        dropout_rates {[float]} -- [Value of dropout rate and determine to use dropout or not] (default: {None})
        names {[str]} -- [Name of the layer] (default: {None})
    Returns:
        [Tensor] -- [Layer depthwise convolution 2D with dtype tf.float32]
    """
    if names != None:
        names = str(names)
    else:
        names = 'deptwise_conv_2d'
    # Normalize padding to the upper-case token TF expects; unrecognized
    # values fall back to 'SAME', matching the sibling layers.
    if paddings == 'Valid' or paddings == 'valid' or paddings == 'VALID':
        paddings = 'VALID'
    elif paddings == 'Same' or paddings == 'same' or paddings == 'SAME':
        paddings = 'SAME'
    else:
        paddings = 'SAME'
    # Filter shape is [height, width, in_channels, channel_multiplier].
    weight = new_weights(shapes=[kernel_sizes[0], kernel_sizes[1], input_tensor.get_shape().as_list()[-1], number_filters], names=names, dtypes=tf.float32)
    # BUG FIX: tf.nn.depthwise_conv2d requires a length-4 strides list
    # matching NHWC ([1, h, w, 1]); the original passed only [h, w].
    layer = tf.nn.depthwise_conv2d(input=input_tensor, filter=weight, strides=[1, stride_sizes[0], stride_sizes[1], 1], padding=paddings, rate=None, name='deptwise_conv_2d_'+names, data_format=None)
    if activations != None:
        layer = define_activation_function(input_tensor=layer, activation_name=activations, names=names)
    if batch_normalizations:
        layer = batch_normalization(input_tensor=layer, is_trainable=batch_normalizations)
    if dropout_rates != None:
        layer = dropout(input_tensor=layer, dropout_rates=dropout_rates, names=names)
    return layer
def max_pool_1d(input_tensor, pool_sizes=(2), stride_sizes=(1), paddings='same', names=None):
    """[Function for adding a max-pooling 1D layer]
    Arguments:
        input_tensor {[float, double, int32, int64, uint8, int16, or int8]} -- [A Tensor representing prelayer values]
    Keyword Arguments:
        pool_sizes {int} -- [Size of the pooling window] (default: {2})
        stride_sizes {int} -- [Stride of the pooling window] (default: {1})
        paddings {str} -- [Indicating the type of padding algorithm to use] (default: {'same'})
        names {[str]} -- [Name of the layer] (default: {None})
    Returns:
        [Tensor] -- [A layer with maxpool 1D with dtype tf.float32]
    """
    if names != None:
        names = str(names)
    else:
        names = 'max_pool_1d'
    # Normalize padding to the upper-case token TF expects; unrecognized
    # values fall back to 'SAME', matching the sibling layers.
    if paddings == 'Valid' or paddings == 'valid' or paddings == 'VALID':
        paddings = 'VALID'
    elif paddings == 'Same' or paddings == 'same' or paddings == 'SAME':
        paddings = 'SAME'
    else:
        paddings = 'SAME'
    # BUG FIX: the computed `names` was previously discarded (name=None),
    # so the op never received the layer name this function builds.
    layer = tf.nn.max_pool1d(input=input_tensor, ksize=pool_sizes, strides=stride_sizes, padding=paddings, name=names)
    return layer
def max_pool_2d(input_tensor, pool_sizes=(2,2), stride_sizes=(1,1), paddings='same', names=None):
    """[Function for adding a max-pooling 2D layer]
    Arguments:
        input_tensor {[float, double, int32, int64, uint8, int16, or int8]} -- [A Tensor representing prelayer values]
    Keyword Arguments:
        pool_sizes {tuple} -- [Size of the pooling window] (default: {(2,2)})
        stride_sizes {tuple} -- [Stride of the pooling window] (default: {(1,1)})
        paddings {str} -- [Indicating the type of padding algorithm to use] (default: {'same'})
        names {[str]} -- [Name of the layer] (default: {None})
    Returns:
        [Tensor] -- [A layer with maxpool 2D with dtype tf.float32]
    """
    # Default the layer name when the caller did not supply one.
    names = str(names) if names is not None else 'max_pool_2d'
    # Normalize padding to the upper-case token TF expects; any value
    # other than a 'valid' variant resolves to 'SAME'.
    if paddings in ('Valid', 'valid', 'VALID'):
        paddings = 'VALID'
    else:
        paddings = 'SAME'
    return tf.nn.max_pool2d(input=input_tensor, ksize=pool_sizes, strides=stride_sizes, padding=paddings, name=names)
def max_pool_3d(input_tensor, pool_sizes=(2,2,2), stride_sizes=(1,1,1), paddings='same', names=None):
    """[Function for adding a max-pooling 3D layer]
    Arguments:
        input_tensor {[float, double, int32, int64, uint8, int16, or int8]} -- [A Tensor representing prelayer values]
    Keyword Arguments:
        pool_sizes {tuple} -- [Size of the pooling window] (default: {(2,2,2)})
        stride_sizes {tuple} -- [Stride of the pooling window] (default: {(1,1,1)})
        paddings {str} -- [Indicating the type of padding algorithm to use] (default: {'same'})
        names {[str]} -- [Name of the layer] (default: {None})
    Returns:
        [Tensor] -- [A layer with maxpool 3D with dtype tf.float32]
    """
    if names != None:
        names = str(names)
    else:
        names = 'max_pool_3d'
    # Normalize padding to the upper-case token TF expects; unrecognized
    # values fall back to 'SAME', matching the sibling layers.
    if paddings == 'Valid' or paddings == 'valid' or paddings == 'VALID':
        paddings = 'VALID'
    elif paddings == 'Same' or paddings == 'same' or paddings == 'SAME':
        paddings = 'SAME'
    else:
        paddings = 'SAME'
    # BUG FIX: the computed `names` was previously discarded (name=None),
    # so the op never received the layer name this function builds.
    layer = tf.nn.max_pool3d(input=input_tensor, ksize=pool_sizes, strides=stride_sizes, padding=paddings, name=names)
    return layer
| 41.032419
| 198
| 0.644463
| 2,072
| 16,454
| 4.992761
| 0.072394
| 0.069599
| 0.029
| 0.044466
| 0.920831
| 0.906912
| 0.886902
| 0.877912
| 0.873272
| 0.864089
| 0
| 0.020419
| 0.22019
| 16,454
| 401
| 199
| 41.032419
| 0.785831
| 0.408837
| 0
| 0.784211
| 0
| 0
| 0.068535
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057895
| false
| 0
| 0.021053
| 0
| 0.136842
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
90b2313e4522517cda52d818bd40fbd6bfcfe694
| 77
|
py
|
Python
|
GamesKeeper/models/__init__.py
|
joaoh1/GamesKeeper
|
4d28e70beb24f7e8a6abd2d2ec3ed696156a00c8
|
[
"MIT"
] | null | null | null |
GamesKeeper/models/__init__.py
|
joaoh1/GamesKeeper
|
4d28e70beb24f7e8a6abd2d2ec3ed696156a00c8
|
[
"MIT"
] | null | null | null |
GamesKeeper/models/__init__.py
|
joaoh1/GamesKeeper
|
4d28e70beb24f7e8a6abd2d2ec3ed696156a00c8
|
[
"MIT"
] | 2
|
2019-06-28T18:39:28.000Z
|
2019-06-29T03:11:47.000Z
|
from GamesKeeper.models.guild import *
from GamesKeeper.models.games import *
| 38.5
| 38
| 0.831169
| 10
| 77
| 6.4
| 0.6
| 0.46875
| 0.65625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 77
| 2
| 39
| 38.5
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
90d85349ba8476671f7c5276ab099ca1b7cfda0c
| 45,270
|
py
|
Python
|
sdk/python/pulumi_azure/iot/security_solution.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/iot/security_solution.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/iot/security_solution.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['SecuritySolutionArgs', 'SecuritySolution']
@pulumi.input_type
# NOTE: tfgen-generated resource-args class ("do not edit by hand" per the
# file header). @pulumi.input_type introspects the __init__ signature and the
# named getters/setters below, so their structure must not be altered.
class SecuritySolutionArgs:
    def __init__(__self__, *,
                 display_name: pulumi.Input[str],
                 iothub_ids: pulumi.Input[Sequence[pulumi.Input[str]]],
                 resource_group_name: pulumi.Input[str],
                 additional_workspaces: Optional[pulumi.Input[Sequence[pulumi.Input['SecuritySolutionAdditionalWorkspaceArgs']]]] = None,
                 disabled_data_sources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 events_to_exports: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 log_analytics_workspace_id: Optional[pulumi.Input[str]] = None,
                 log_unmasked_ips_enabled: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 query_for_resources: Optional[pulumi.Input[str]] = None,
                 query_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 recommendations_enabled: Optional[pulumi.Input['SecuritySolutionRecommendationsEnabledArgs']] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a SecuritySolution resource.
        :param pulumi.Input[str] display_name: Specifies the Display Name for this Iot Security Solution.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] iothub_ids: Specifies the IoT Hub resource IDs to which this Iot Security Solution is applied.
        :param pulumi.Input[str] resource_group_name: Specifies the name of the resource group in which to create the Iot Security Solution. Changing this forces a new resource to be created.
        :param pulumi.Input[Sequence[pulumi.Input['SecuritySolutionAdditionalWorkspaceArgs']]] additional_workspaces: A `additional_workspace` block as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] disabled_data_sources: A list of disabled data sources for the Iot Security Solution. Possible value is `TwinData`.
        :param pulumi.Input[bool] enabled: Is the Iot Security Solution enabled? Defaults to `true`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] events_to_exports: A list of data which is to exported to analytic workspace. Valid values include `RawEvents`.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] log_analytics_workspace_id: Specifies the Log Analytics Workspace ID to which the security data will be sent.
        :param pulumi.Input[bool] log_unmasked_ips_enabled: Should ip addressed be unmasked in the log? Defaults to `false`.
        :param pulumi.Input[str] name: Specifies the name of the Iot Security Solution. Changing this forces a new resource to be created.
        :param pulumi.Input[str] query_for_resources: An Azure Resource Graph query used to set the resources monitored.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] query_subscription_ids: A list of subscription Ids on which the user defined resources query should be executed.
        :param pulumi.Input['SecuritySolutionRecommendationsEnabledArgs'] recommendations_enabled: A `recommendations_enabled` block of options to enable or disable as defined below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        """
        # Required arguments are set unconditionally; optional ones only when
        # provided, so pulumi can distinguish "unset" from an explicit value.
        pulumi.set(__self__, "display_name", display_name)
        pulumi.set(__self__, "iothub_ids", iothub_ids)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        if additional_workspaces is not None:
            pulumi.set(__self__, "additional_workspaces", additional_workspaces)
        if disabled_data_sources is not None:
            pulumi.set(__self__, "disabled_data_sources", disabled_data_sources)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if events_to_exports is not None:
            pulumi.set(__self__, "events_to_exports", events_to_exports)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if log_analytics_workspace_id is not None:
            pulumi.set(__self__, "log_analytics_workspace_id", log_analytics_workspace_id)
        if log_unmasked_ips_enabled is not None:
            pulumi.set(__self__, "log_unmasked_ips_enabled", log_unmasked_ips_enabled)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if query_for_resources is not None:
            pulumi.set(__self__, "query_for_resources", query_for_resources)
        if query_subscription_ids is not None:
            pulumi.set(__self__, "query_subscription_ids", query_subscription_ids)
        if recommendations_enabled is not None:
            pulumi.set(__self__, "recommendations_enabled", recommendations_enabled)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    # Each property below maps the snake_case Python attribute to the
    # camelCase provider-schema name via pulumi.getter(name=...).
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Input[str]:
        """
        Specifies the Display Name for this Iot Security Solution.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="iothubIds")
    def iothub_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        Specifies the IoT Hub resource IDs to which this Iot Security Solution is applied.
        """
        return pulumi.get(self, "iothub_ids")

    @iothub_ids.setter
    def iothub_ids(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "iothub_ids", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        Specifies the name of the resource group in which to create the Iot Security Solution. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="additionalWorkspaces")
    def additional_workspaces(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SecuritySolutionAdditionalWorkspaceArgs']]]]:
        """
        A `additional_workspace` block as defined below.
        """
        return pulumi.get(self, "additional_workspaces")

    @additional_workspaces.setter
    def additional_workspaces(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SecuritySolutionAdditionalWorkspaceArgs']]]]):
        pulumi.set(self, "additional_workspaces", value)

    @property
    @pulumi.getter(name="disabledDataSources")
    def disabled_data_sources(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of disabled data sources for the Iot Security Solution. Possible value is `TwinData`.
        """
        return pulumi.get(self, "disabled_data_sources")

    @disabled_data_sources.setter
    def disabled_data_sources(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "disabled_data_sources", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Is the Iot Security Solution enabled? Defaults to `true`.
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter(name="eventsToExports")
    def events_to_exports(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of data which is to exported to analytic workspace. Valid values include `RawEvents`.
        """
        return pulumi.get(self, "events_to_exports")

    @events_to_exports.setter
    def events_to_exports(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "events_to_exports", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter(name="logAnalyticsWorkspaceId")
    def log_analytics_workspace_id(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the Log Analytics Workspace ID to which the security data will be sent.
        """
        return pulumi.get(self, "log_analytics_workspace_id")

    @log_analytics_workspace_id.setter
    def log_analytics_workspace_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "log_analytics_workspace_id", value)

    @property
    @pulumi.getter(name="logUnmaskedIpsEnabled")
    def log_unmasked_ips_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Should ip addressed be unmasked in the log? Defaults to `false`.
        """
        return pulumi.get(self, "log_unmasked_ips_enabled")

    @log_unmasked_ips_enabled.setter
    def log_unmasked_ips_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "log_unmasked_ips_enabled", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Iot Security Solution. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="queryForResources")
    def query_for_resources(self) -> Optional[pulumi.Input[str]]:
        """
        An Azure Resource Graph query used to set the resources monitored.
        """
        return pulumi.get(self, "query_for_resources")

    @query_for_resources.setter
    def query_for_resources(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "query_for_resources", value)

    @property
    @pulumi.getter(name="querySubscriptionIds")
    def query_subscription_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of subscription Ids on which the user defined resources query should be executed.
        """
        return pulumi.get(self, "query_subscription_ids")

    @query_subscription_ids.setter
    def query_subscription_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "query_subscription_ids", value)

    @property
    @pulumi.getter(name="recommendationsEnabled")
    def recommendations_enabled(self) -> Optional[pulumi.Input['SecuritySolutionRecommendationsEnabledArgs']]:
        """
        A `recommendations_enabled` block of options to enable or disable as defined below.
        """
        return pulumi.get(self, "recommendations_enabled")

    @recommendations_enabled.setter
    def recommendations_enabled(self, value: Optional[pulumi.Input['SecuritySolutionRecommendationsEnabledArgs']]):
        pulumi.set(self, "recommendations_enabled", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
# NOTE: tfgen-generated state class ("do not edit by hand" per the file
# header). Unlike SecuritySolutionArgs, every field here is Optional because
# this shape is used for looking up / filtering existing resources.
# @pulumi.input_type introspects the structure below; do not restructure.
class _SecuritySolutionState:
    def __init__(__self__, *,
                 additional_workspaces: Optional[pulumi.Input[Sequence[pulumi.Input['SecuritySolutionAdditionalWorkspaceArgs']]]] = None,
                 disabled_data_sources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 events_to_exports: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 iothub_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 log_analytics_workspace_id: Optional[pulumi.Input[str]] = None,
                 log_unmasked_ips_enabled: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 query_for_resources: Optional[pulumi.Input[str]] = None,
                 query_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 recommendations_enabled: Optional[pulumi.Input['SecuritySolutionRecommendationsEnabledArgs']] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        Input properties used for looking up and filtering SecuritySolution resources.
        :param pulumi.Input[Sequence[pulumi.Input['SecuritySolutionAdditionalWorkspaceArgs']]] additional_workspaces: A `additional_workspace` block as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] disabled_data_sources: A list of disabled data sources for the Iot Security Solution. Possible value is `TwinData`.
        :param pulumi.Input[str] display_name: Specifies the Display Name for this Iot Security Solution.
        :param pulumi.Input[bool] enabled: Is the Iot Security Solution enabled? Defaults to `true`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] events_to_exports: A list of data which is to exported to analytic workspace. Valid values include `RawEvents`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] iothub_ids: Specifies the IoT Hub resource IDs to which this Iot Security Solution is applied.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] log_analytics_workspace_id: Specifies the Log Analytics Workspace ID to which the security data will be sent.
        :param pulumi.Input[bool] log_unmasked_ips_enabled: Should ip addressed be unmasked in the log? Defaults to `false`.
        :param pulumi.Input[str] name: Specifies the name of the Iot Security Solution. Changing this forces a new resource to be created.
        :param pulumi.Input[str] query_for_resources: An Azure Resource Graph query used to set the resources monitored.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] query_subscription_ids: A list of subscription Ids on which the user defined resources query should be executed.
        :param pulumi.Input['SecuritySolutionRecommendationsEnabledArgs'] recommendations_enabled: A `recommendations_enabled` block of options to enable or disable as defined below.
        :param pulumi.Input[str] resource_group_name: Specifies the name of the resource group in which to create the Iot Security Solution. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        """
        # Only set fields that were provided, so pulumi can distinguish
        # "unset" from an explicit value during state filtering.
        if additional_workspaces is not None:
            pulumi.set(__self__, "additional_workspaces", additional_workspaces)
        if disabled_data_sources is not None:
            pulumi.set(__self__, "disabled_data_sources", disabled_data_sources)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if events_to_exports is not None:
            pulumi.set(__self__, "events_to_exports", events_to_exports)
        if iothub_ids is not None:
            pulumi.set(__self__, "iothub_ids", iothub_ids)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if log_analytics_workspace_id is not None:
            pulumi.set(__self__, "log_analytics_workspace_id", log_analytics_workspace_id)
        if log_unmasked_ips_enabled is not None:
            pulumi.set(__self__, "log_unmasked_ips_enabled", log_unmasked_ips_enabled)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if query_for_resources is not None:
            pulumi.set(__self__, "query_for_resources", query_for_resources)
        if query_subscription_ids is not None:
            pulumi.set(__self__, "query_subscription_ids", query_subscription_ids)
        if recommendations_enabled is not None:
            pulumi.set(__self__, "recommendations_enabled", recommendations_enabled)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    # Each property below maps the snake_case Python attribute to the
    # camelCase provider-schema name via pulumi.getter(name=...).
    @property
    @pulumi.getter(name="additionalWorkspaces")
    def additional_workspaces(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SecuritySolutionAdditionalWorkspaceArgs']]]]:
        """
        A `additional_workspace` block as defined below.
        """
        return pulumi.get(self, "additional_workspaces")

    @additional_workspaces.setter
    def additional_workspaces(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SecuritySolutionAdditionalWorkspaceArgs']]]]):
        pulumi.set(self, "additional_workspaces", value)

    @property
    @pulumi.getter(name="disabledDataSources")
    def disabled_data_sources(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of disabled data sources for the Iot Security Solution. Possible value is `TwinData`.
        """
        return pulumi.get(self, "disabled_data_sources")

    @disabled_data_sources.setter
    def disabled_data_sources(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "disabled_data_sources", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the Display Name for this Iot Security Solution.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Is the Iot Security Solution enabled? Defaults to `true`.
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter(name="eventsToExports")
    def events_to_exports(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of data which is to exported to analytic workspace. Valid values include `RawEvents`.
        """
        return pulumi.get(self, "events_to_exports")

    @events_to_exports.setter
    def events_to_exports(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "events_to_exports", value)

    @property
    @pulumi.getter(name="iothubIds")
    def iothub_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Specifies the IoT Hub resource IDs to which this Iot Security Solution is applied.
        """
        return pulumi.get(self, "iothub_ids")

    @iothub_ids.setter
    def iothub_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "iothub_ids", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter(name="logAnalyticsWorkspaceId")
    def log_analytics_workspace_id(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the Log Analytics Workspace ID to which the security data will be sent.
        """
        return pulumi.get(self, "log_analytics_workspace_id")

    @log_analytics_workspace_id.setter
    def log_analytics_workspace_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "log_analytics_workspace_id", value)

    @property
    @pulumi.getter(name="logUnmaskedIpsEnabled")
    def log_unmasked_ips_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Should ip addressed be unmasked in the log? Defaults to `false`.
        """
        return pulumi.get(self, "log_unmasked_ips_enabled")

    @log_unmasked_ips_enabled.setter
    def log_unmasked_ips_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "log_unmasked_ips_enabled", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Iot Security Solution. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="queryForResources")
    def query_for_resources(self) -> Optional[pulumi.Input[str]]:
        """
        An Azure Resource Graph query used to set the resources monitored.
        """
        return pulumi.get(self, "query_for_resources")

    @query_for_resources.setter
    def query_for_resources(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "query_for_resources", value)

    @property
    @pulumi.getter(name="querySubscriptionIds")
    def query_subscription_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of subscription Ids on which the user defined resources query should be executed.
        """
        return pulumi.get(self, "query_subscription_ids")

    @query_subscription_ids.setter
    def query_subscription_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "query_subscription_ids", value)

    @property
    @pulumi.getter(name="recommendationsEnabled")
    def recommendations_enabled(self) -> Optional[pulumi.Input['SecuritySolutionRecommendationsEnabledArgs']]:
        """
        A `recommendations_enabled` block of options to enable or disable as defined below.
        """
        return pulumi.get(self, "recommendations_enabled")

    @recommendations_enabled.setter
    def recommendations_enabled(self, value: Optional[pulumi.Input['SecuritySolutionRecommendationsEnabledArgs']]):
        pulumi.set(self, "recommendations_enabled", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the resource group in which to create the Iot Security Solution. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
class SecuritySolution(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
additional_workspaces: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SecuritySolutionAdditionalWorkspaceArgs']]]]] = None,
disabled_data_sources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
events_to_exports: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
iothub_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
location: Optional[pulumi.Input[str]] = None,
log_analytics_workspace_id: Optional[pulumi.Input[str]] = None,
log_unmasked_ips_enabled: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
query_for_resources: Optional[pulumi.Input[str]] = None,
query_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
recommendations_enabled: Optional[pulumi.Input[pulumi.InputType['SecuritySolutionRecommendationsEnabledArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Manages an iot security solution.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_io_t_hub = azure.iot.IoTHub("exampleIoTHub",
resource_group_name=example_resource_group.name,
location=example_resource_group.location,
sku=azure.iot.IoTHubSkuArgs(
name="S1",
capacity=1,
))
example_security_solution = azure.iot.SecuritySolution("exampleSecuritySolution",
resource_group_name=example_resource_group.name,
location=example_resource_group.location,
display_name="Iot Security Solution",
iothub_ids=[example_io_t_hub.id])
```
## Import
Iot Security Solution can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:iot/securitySolution:SecuritySolution example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Security/IoTSecuritySolutions/solution1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SecuritySolutionAdditionalWorkspaceArgs']]]] additional_workspaces: A `additional_workspace` block as defined below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] disabled_data_sources: A list of disabled data sources for the Iot Security Solution. Possible value is `TwinData`.
:param pulumi.Input[str] display_name: Specifies the Display Name for this Iot Security Solution.
:param pulumi.Input[bool] enabled: Is the Iot Security Solution enabled? Defaults to `true`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] events_to_exports: A list of data which is to exported to analytic workspace. Valid values include `RawEvents`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] iothub_ids: Specifies the IoT Hub resource IDs to which this Iot Security Solution is applied.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] log_analytics_workspace_id: Specifies the Log Analytics Workspace ID to which the security data will be sent.
:param pulumi.Input[bool] log_unmasked_ips_enabled: Should ip addressed be unmasked in the log? Defaults to `false`.
:param pulumi.Input[str] name: Specifies the name of the Iot Security Solution. Changing this forces a new resource to be created.
:param pulumi.Input[str] query_for_resources: An Azure Resource Graph query used to set the resources monitored.
:param pulumi.Input[Sequence[pulumi.Input[str]]] query_subscription_ids: A list of subscription Ids on which the user defined resources query should be executed.
:param pulumi.Input[pulumi.InputType['SecuritySolutionRecommendationsEnabledArgs']] recommendations_enabled: A `recommendations_enabled` block of options to enable or disable as defined below.
:param pulumi.Input[str] resource_group_name: Specifies the name of the resource group in which to create the Iot Security Solution. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
...
@overload
def __init__(__self__,
             resource_name: str,
             args: SecuritySolutionArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Manages an IoT Security Solution.
    ## Example Usage
    ```python
    import pulumi
    import pulumi_azure as azure
    example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
    example_io_t_hub = azure.iot.IoTHub("exampleIoTHub",
        resource_group_name=example_resource_group.name,
        location=example_resource_group.location,
        sku=azure.iot.IoTHubSkuArgs(
            name="S1",
            capacity=1,
        ))
    example_security_solution = azure.iot.SecuritySolution("exampleSecuritySolution",
        resource_group_name=example_resource_group.name,
        location=example_resource_group.location,
        display_name="Iot Security Solution",
        iothub_ids=[example_io_t_hub.id])
    ```
    ## Import
    IoT Security Solution can be imported using the `resource id`, e.g.
    ```sh
    $ pulumi import azure:iot/securitySolution:SecuritySolution example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Security/IoTSecuritySolutions/solution1
    ```
    :param str resource_name: The name of the resource.
    :param SecuritySolutionArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch construction to ``_internal_init`` from either overload form.

    Accepts either a ``SecuritySolutionArgs`` object (args-object overload)
    or the individual keyword arguments (kwargs overload).
    """
    parsed_args, parsed_opts = _utilities.get_resource_args_opts(
        SecuritySolutionArgs, pulumi.ResourceOptions, *args, **kwargs)
    if parsed_args is None:
        # kwargs overload: forward everything untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # args-object overload: expand the args object into keyword arguments.
        __self__._internal_init(resource_name, parsed_opts, **parsed_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   additional_workspaces: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SecuritySolutionAdditionalWorkspaceArgs']]]]] = None,
                   disabled_data_sources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   display_name: Optional[pulumi.Input[str]] = None,
                   enabled: Optional[pulumi.Input[bool]] = None,
                   events_to_exports: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   iothub_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   location: Optional[pulumi.Input[str]] = None,
                   log_analytics_workspace_id: Optional[pulumi.Input[str]] = None,
                   log_unmasked_ips_enabled: Optional[pulumi.Input[bool]] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   query_for_resources: Optional[pulumi.Input[str]] = None,
                   query_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   recommendations_enabled: Optional[pulumi.Input[pulumi.InputType['SecuritySolutionRecommendationsEnabledArgs']]] = None,
                   resource_group_name: Optional[pulumi.Input[str]] = None,
                   tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                   __props__=None):
    """
    Shared initializer behind both ``__init__`` overloads.

    Validates the resource options, builds the ``SecuritySolutionArgs``
    property bag (unless an existing resource is being hydrated through
    ``opts.id``, in which case ``__props__`` is supplied by ``get``) and
    registers the resource with the Pulumi engine.
    """
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = SecuritySolutionArgs.__new__(SecuritySolutionArgs)
        # Required inputs may only be omitted when the engine supplies a URN
        # (i.e. the resource is being adopted rather than created).
        if not opts.urn:
            for required_name, required_value in (
                    ('display_name', display_name),
                    ('iothub_ids', iothub_ids),
                    ('resource_group_name', resource_group_name)):
                if required_value is None:
                    raise TypeError("Missing required property '%s'" % required_name)
        # Copy every input into the property bag, including explicit Nones.
        for prop_name, prop_value in (
                ('additional_workspaces', additional_workspaces),
                ('disabled_data_sources', disabled_data_sources),
                ('display_name', display_name),
                ('enabled', enabled),
                ('events_to_exports', events_to_exports),
                ('iothub_ids', iothub_ids),
                ('location', location),
                ('log_analytics_workspace_id', log_analytics_workspace_id),
                ('log_unmasked_ips_enabled', log_unmasked_ips_enabled),
                ('name', name),
                ('query_for_resources', query_for_resources),
                ('query_subscription_ids', query_subscription_ids),
                ('recommendations_enabled', recommendations_enabled),
                ('resource_group_name', resource_group_name),
                ('tags', tags)):
            __props__.__dict__[prop_name] = prop_value
    super(SecuritySolution, __self__).__init__(
        'azure:iot/securitySolution:SecuritySolution',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        additional_workspaces: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SecuritySolutionAdditionalWorkspaceArgs']]]]] = None,
        disabled_data_sources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        display_name: Optional[pulumi.Input[str]] = None,
        enabled: Optional[pulumi.Input[bool]] = None,
        events_to_exports: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        iothub_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        location: Optional[pulumi.Input[str]] = None,
        log_analytics_workspace_id: Optional[pulumi.Input[str]] = None,
        log_unmasked_ips_enabled: Optional[pulumi.Input[bool]] = None,
        name: Optional[pulumi.Input[str]] = None,
        query_for_resources: Optional[pulumi.Input[str]] = None,
        query_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        recommendations_enabled: Optional[pulumi.Input[pulumi.InputType['SecuritySolutionRecommendationsEnabledArgs']]] = None,
        resource_group_name: Optional[pulumi.Input[str]] = None,
        tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'SecuritySolution':
    """
    Get an existing SecuritySolution resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.
    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SecuritySolutionAdditionalWorkspaceArgs']]]] additional_workspaces: An `additional_workspace` block as defined below.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] disabled_data_sources: A list of disabled data sources for the IoT Security Solution. Possible value is `TwinData`.
    :param pulumi.Input[str] display_name: Specifies the Display Name for this IoT Security Solution.
    :param pulumi.Input[bool] enabled: Is the IoT Security Solution enabled? Defaults to `true`.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] events_to_exports: A list of data to be exported to the analytics workspace. Valid values include `RawEvents`.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] iothub_ids: Specifies the IoT Hub resource IDs to which this IoT Security Solution is applied.
    :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
    :param pulumi.Input[str] log_analytics_workspace_id: Specifies the Log Analytics Workspace ID to which the security data will be sent.
    :param pulumi.Input[bool] log_unmasked_ips_enabled: Should IP addresses be unmasked in the log? Defaults to `false`.
    :param pulumi.Input[str] name: Specifies the name of the IoT Security Solution. Changing this forces a new resource to be created.
    :param pulumi.Input[str] query_for_resources: An Azure Resource Graph query used to set the resources monitored.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] query_subscription_ids: A list of subscription IDs on which the user-defined resources query should be executed.
    :param pulumi.Input[pulumi.InputType['SecuritySolutionRecommendationsEnabledArgs']] recommendations_enabled: A `recommendations_enabled` block of options to enable or disable as defined below.
    :param pulumi.Input[str] resource_group_name: Specifies the name of the resource group in which to create the IoT Security Solution. Changing this forces a new resource to be created.
    :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = _SecuritySolutionState.__new__(_SecuritySolutionState)
    # Assemble the full state bag in one place, then apply it wholesale.
    state = {
        'additional_workspaces': additional_workspaces,
        'disabled_data_sources': disabled_data_sources,
        'display_name': display_name,
        'enabled': enabled,
        'events_to_exports': events_to_exports,
        'iothub_ids': iothub_ids,
        'location': location,
        'log_analytics_workspace_id': log_analytics_workspace_id,
        'log_unmasked_ips_enabled': log_unmasked_ips_enabled,
        'name': name,
        'query_for_resources': query_for_resources,
        'query_subscription_ids': query_subscription_ids,
        'recommendations_enabled': recommendations_enabled,
        'resource_group_name': resource_group_name,
        'tags': tags,
    }
    __props__.__dict__.update(state)
    return SecuritySolution(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="additionalWorkspaces")
def additional_workspaces(self) -> pulumi.Output[Optional[Sequence['outputs.SecuritySolutionAdditionalWorkspace']]]:
    """
    An `additional_workspace` block as defined below.
    """
    return pulumi.get(self, "additional_workspaces")
@property
@pulumi.getter(name="disabledDataSources")
def disabled_data_sources(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """
    A list of disabled data sources for the IoT Security Solution. Possible value is `TwinData`.
    """
    return pulumi.get(self, "disabled_data_sources")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
    """
    Specifies the Display Name for this IoT Security Solution.
    """
    return pulumi.get(self, "display_name")
@property
@pulumi.getter
def enabled(self) -> pulumi.Output[Optional[bool]]:
    """
    Is the IoT Security Solution enabled? Defaults to `true`.
    """
    return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="eventsToExports")
def events_to_exports(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """
    A list of data to be exported to the analytics workspace. Valid values include `RawEvents`.
    """
    return pulumi.get(self, "events_to_exports")
@property
@pulumi.getter(name="iothubIds")
def iothub_ids(self) -> pulumi.Output[Sequence[str]]:
    """
    Specifies the IoT Hub resource IDs to which this IoT Security Solution is applied.
    """
    return pulumi.get(self, "iothub_ids")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
    """
    Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
    """
    return pulumi.get(self, "location")
@property
@pulumi.getter(name="logAnalyticsWorkspaceId")
def log_analytics_workspace_id(self) -> pulumi.Output[Optional[str]]:
    """
    Specifies the Log Analytics Workspace ID to which the security data will be sent.
    """
    return pulumi.get(self, "log_analytics_workspace_id")
@property
@pulumi.getter(name="logUnmaskedIpsEnabled")
def log_unmasked_ips_enabled(self) -> pulumi.Output[Optional[bool]]:
    """
    Should IP addresses be unmasked in the log? Defaults to `false`.
    """
    return pulumi.get(self, "log_unmasked_ips_enabled")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """
    Specifies the name of the IoT Security Solution. Changing this forces a new resource to be created.
    """
    return pulumi.get(self, "name")
@property
@pulumi.getter(name="queryForResources")
def query_for_resources(self) -> pulumi.Output[str]:
    """
    An Azure Resource Graph query used to set the resources monitored.
    """
    return pulumi.get(self, "query_for_resources")
@property
@pulumi.getter(name="querySubscriptionIds")
def query_subscription_ids(self) -> pulumi.Output[Sequence[str]]:
    """
    A list of subscription IDs on which the user-defined resources query should be executed.
    """
    return pulumi.get(self, "query_subscription_ids")
@property
@pulumi.getter(name="recommendationsEnabled")
def recommendations_enabled(self) -> pulumi.Output['outputs.SecuritySolutionRecommendationsEnabled']:
    """
    A `recommendations_enabled` block of options to enable or disable as defined below.
    """
    return pulumi.get(self, "recommendations_enabled")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
    """
    Specifies the name of the resource group in which to create the IoT Security Solution. Changing this forces a new resource to be created.
    """
    return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
    """
    A mapping of tags to assign to the resource.
    """
    return pulumi.get(self, "tags")
| 52.094361
| 213
| 0.687431
| 5,311
| 45,270
| 5.63472
| 0.045378
| 0.101818
| 0.067834
| 0.054301
| 0.93661
| 0.929426
| 0.920771
| 0.912818
| 0.910847
| 0.901591
| 0
| 0.002055
| 0.215308
| 45,270
| 868
| 214
| 52.154378
| 0.840385
| 0.333532
| 0
| 0.837022
| 1
| 0
| 0.139161
| 0.082322
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16499
| false
| 0.002012
| 0.014085
| 0
| 0.277666
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
90f42b0939ce35c4d04949d1d542a68dd9ae2a50
| 8,542
|
py
|
Python
|
great_international/migrations/0022_auto_20190508_1300.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 6
|
2018-03-20T11:19:07.000Z
|
2021-10-05T07:53:11.000Z
|
great_international/migrations/0022_auto_20190508_1300.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 802
|
2018-02-05T14:16:13.000Z
|
2022-02-10T10:59:21.000Z
|
great_international/migrations/0022_auto_20190508_1300.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 6
|
2019-01-22T13:19:37.000Z
|
2019-07-01T10:35:26.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-05-08 13:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax the article/sector text fields: every base field keeps its kwargs,
    and each per-locale translated copy additionally allows NULL."""

    dependencies = [
        ('great_international', '0021_auto_20190423_1423'),
    ]

    # Locale codes, in the exact order the original generated migration
    # emitted the translated field variants.
    _lang_codes = ('ar', 'de', 'en_gb', 'es', 'fr', 'ja', 'pt', 'zh_hans')

    operations = []
    # For each (model, base field, base kwargs) group: alter the base field,
    # then each translated "<field>_<lang>" variant with null=True added.
    for _model, _field, _base_kwargs in (
            ('internationalarticlepage', 'article_subheading',
             {'blank': True,
              'help_text': 'This is a subheading that displays below the main '
                           'title on the article page'}),
            ('internationalarticlepage', 'article_teaser',
             {'help_text': 'This is a subheading that displays when the '
                           'article is featured on another page'}),
            ('internationalarticlepage', 'article_title', {}),
            ('internationalsectorpage', 'sub_heading', {'blank': True}),
    ):
        operations.append(migrations.AlterField(
            model_name=_model,
            name=_field,
            field=models.TextField(**_base_kwargs),
        ))
        for _code in _lang_codes:
            operations.append(migrations.AlterField(
                model_name=_model,
                name='%s_%s' % (_field, _code),
                field=models.TextField(null=True, **_base_kwargs),
            ))

    # Drop the loop helpers so the class namespace holds only the standard
    # migration attributes.
    del _lang_codes, _model, _field, _base_kwargs, _code
| 43.581633
| 147
| 0.629361
| 862
| 8,542
| 6.075406
| 0.084687
| 0.137483
| 0.171854
| 0.199351
| 0.952645
| 0.944052
| 0.939469
| 0.928585
| 0.873783
| 0.856597
| 0
| 0.005519
| 0.278857
| 8,542
| 195
| 148
| 43.805128
| 0.844643
| 0.008078
| 0
| 0.744681
| 1
| 0
| 0.344156
| 0.129516
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010638
| 0
| 0.026596
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
90feec20d45234a901dd44af4e5fa2f38201ae3e
| 72
|
py
|
Python
|
tests/test_version.py
|
jhkennedy/asf-tools
|
f218cf80b98c4eb0e6f66e53244a15e198d49012
|
[
"BSD-3-Clause"
] | 2
|
2021-06-17T13:25:15.000Z
|
2021-12-01T08:19:05.000Z
|
tests/test_version.py
|
jhkennedy/asf-tools
|
f218cf80b98c4eb0e6f66e53244a15e198d49012
|
[
"BSD-3-Clause"
] | 23
|
2020-11-25T00:45:57.000Z
|
2022-03-17T22:05:58.000Z
|
tests/test_version.py
|
ASFHyP3/GIS-tools
|
435a544bd6f3f4953679e5d891c0e454f7bdd471
|
[
"BSD-3-Clause"
] | 4
|
2021-05-10T06:03:44.000Z
|
2021-10-08T19:48:31.000Z
|
import asf_tools
def test_version():
    """The asf_tools package must expose a truthy ``__version__``."""
    version = asf_tools.__version__
    assert version
| 12
| 32
| 0.777778
| 10
| 72
| 4.9
| 0.7
| 0.326531
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 72
| 5
| 33
| 14.4
| 0.816667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
294927166e7342e614f00c2e606e4e1a986c2e01
| 946,805
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_plat_chas_invmgr_ng_oper.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_plat_chas_invmgr_ng_oper.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_plat_chas_invmgr_ng_oper.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_plat_chas_invmgr_ng_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR plat\-chas\-invmgr\-ng package operational data.
This module contains definitions
for the following management objects\:
platform\: Platform information
platform\-inventory\: platform inventory
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class CardRedundancyState(Enum):
    """
    CardRedundancyState (Enum Class)

    Redundancy state detail.

    .. data:: active = 1

        Active

    .. data:: standby = 2

        Standby
    """

    # Each member maps a YANG enum integer to its YANG identifier string.
    active = Enum.YLeaf(1, "active")

    standby = Enum.YLeaf(2, "standby")
class InvAdminState(Enum):
    """
    InvAdminState (Enum Class)

    Inventory administrative state.

    .. data:: admin_state_invalid = 0

        admin state invalid

    .. data:: admin_up = 1

        admin up

    .. data:: admin_down = 2

        admin down
    """

    # Each member maps a YANG enum integer to its YANG identifier string.
    admin_state_invalid = Enum.YLeaf(0, "admin-state-invalid")

    admin_up = Enum.YLeaf(1, "admin-up")

    admin_down = Enum.YLeaf(2, "admin-down")
class InvCardState(Enum):
    """
    InvCardState (Enum Class)

    Inventory card state.

    Operational states of an inventory card, from ``inv_card_not_present``
    (0) through ``inv_card_num_states`` (38).  Each member's integer value
    and YANG identifier are given by its ``Enum.YLeaf`` assignment below;
    the member name is the identifier with hyphens replaced by underscores.
    """

    inv_card_not_present = Enum.YLeaf(0, "inv-card-not-present")

    inv_card_present = Enum.YLeaf(1, "inv-card-present")

    inv_card_reset = Enum.YLeaf(2, "inv-card-reset")

    inv_card_booting = Enum.YLeaf(3, "inv-card-booting")

    inv_card_mbi_booting = Enum.YLeaf(4, "inv-card-mbi-booting")

    inv_card_running_mbi = Enum.YLeaf(5, "inv-card-running-mbi")

    inv_card_running_ena = Enum.YLeaf(6, "inv-card-running-ena")

    inv_card_bring_down = Enum.YLeaf(7, "inv-card-bring-down")

    inv_card_ena_failure = Enum.YLeaf(8, "inv-card-ena-failure")

    inv_card_f_diag_run = Enum.YLeaf(9, "inv-card-f-diag-run")

    inv_card_f_diag_failure = Enum.YLeaf(10, "inv-card-f-diag-failure")

    inv_card_powered = Enum.YLeaf(11, "inv-card-powered")

    inv_card_unpowered = Enum.YLeaf(12, "inv-card-unpowered")

    inv_card_mdr = Enum.YLeaf(13, "inv-card-mdr")

    inv_card_mdr_running_mbi = Enum.YLeaf(14, "inv-card-mdr-running-mbi")

    inv_card_main_t_mode = Enum.YLeaf(15, "inv-card-main-t-mode")

    inv_card_admin_down = Enum.YLeaf(16, "inv-card-admin-down")

    inv_card_no_mon = Enum.YLeaf(17, "inv-card-no-mon")

    inv_card_unknown = Enum.YLeaf(18, "inv-card-unknown")

    inv_card_failed = Enum.YLeaf(19, "inv-card-failed")

    inv_card_ok = Enum.YLeaf(20, "inv-card-ok")

    inv_card_missing = Enum.YLeaf(21, "inv-card-missing")

    inv_card_field_diag_downloading = Enum.YLeaf(22, "inv-card-field-diag-downloading")

    inv_card_field_diag_unmonitor = Enum.YLeaf(23, "inv-card-field-diag-unmonitor")

    inv_card_fabric_field_diag_unmonitor = Enum.YLeaf(24, "inv-card-fabric-field-diag-unmonitor")

    inv_card_field_diag_rp_launching = Enum.YLeaf(25, "inv-card-field-diag-rp-launching")

    inv_card_field_diag_running = Enum.YLeaf(26, "inv-card-field-diag-running")

    inv_card_field_diag_pass = Enum.YLeaf(27, "inv-card-field-diag-pass")

    inv_card_field_diag_fail = Enum.YLeaf(28, "inv-card-field-diag-fail")

    inv_card_field_diag_timeout = Enum.YLeaf(29, "inv-card-field-diag-timeout")

    inv_card_disabled = Enum.YLeaf(30, "inv-card-disabled")

    inv_card_spa_booting = Enum.YLeaf(31, "inv-card-spa-booting")

    inv_card_not_allowed_online = Enum.YLeaf(32, "inv-card-not-allowed-online")

    inv_card_stopped = Enum.YLeaf(33, "inv-card-stopped")

    inv_card_incompatible_fw_ver = Enum.YLeaf(34, "inv-card-incompatible-fw-ver")

    inv_card_fpd_hold = Enum.YLeaf(35, "inv-card-fpd-hold")

    inv_card_node_prep = Enum.YLeaf(36, "inv-card-node-prep")

    inv_card_updating_fpd = Enum.YLeaf(37, "inv-card-updating-fpd")

    inv_card_num_states = Enum.YLeaf(38, "inv-card-num-states")
class InvMonitorState(Enum):
    """
    InvMonitorState (Enum Class)

    Inventory monitor state.

    .. data:: unmonitored = 0

        unmonitored

    .. data:: monitored = 1

        monitored
    """

    # Each member maps a YANG enum integer to its YANG identifier string.
    unmonitored = Enum.YLeaf(0, "unmonitored")

    monitored = Enum.YLeaf(1, "monitored")
class InvPowerAdminState(Enum):
    """
    InvPowerAdminState (Enum Class)

    Inventory power administrative state.

    .. data:: admin_power_invalid = 0

        admin power invalid

    .. data:: admin_on = 2

        admin on

    .. data:: admin_off = 3

        admin off
    """

    # Each member maps a YANG enum integer to its YANG identifier string.
    # NOTE(review): value 1 is intentionally absent from the YANG model.
    admin_power_invalid = Enum.YLeaf(0, "admin-power-invalid")

    admin_on = Enum.YLeaf(2, "admin-on")

    admin_off = Enum.YLeaf(3, "admin-off")
class InvResetReason(Enum):
    """
    InvResetReason (Enum Class)

    Inventory module reset reason.

    Reasons a module was reset, from ``module_reset_reason_unknown`` (0)
    through ``module_reset_reason_user_unpower`` (6).  Each member's integer
    value and YANG identifier are given by its ``Enum.YLeaf`` assignment
    below.
    """

    module_reset_reason_unknown = Enum.YLeaf(0, "module-reset-reason-unknown")

    module_reset_reason_powerup = Enum.YLeaf(1, "module-reset-reason-powerup")

    module_reset_reason_user_shutdown = Enum.YLeaf(2, "module-reset-reason-user-shutdown")

    module_reset_reason_user_reload = Enum.YLeaf(3, "module-reset-reason-user-reload")

    module_reset_reason_auto_reload = Enum.YLeaf(4, "module-reset-reason-auto-reload")

    module_reset_reason_environment = Enum.YLeaf(5, "module-reset-reason-environment")

    module_reset_reason_user_unpower = Enum.YLeaf(6, "module-reset-reason-user-unpower")
class NodeState(Enum):
    """
    NodeState (Enum Class)

    Node state detail (lifecycle of a card/node from insertion through
    operation, diagnostics, and shutdown).

    .. data:: not_present = 0

    	Not present

    .. data:: present = 1

    	Present

    .. data:: reset = 2

    	Reset

    .. data:: rommon = 3

    	Card booting or rommon

    .. data:: mbi_boot = 4

    	MBI booting

    .. data:: mbi_run = 5

    	Running MBI

    .. data:: xr_run = 6

    	Running ENA

    .. data:: bring_down = 7

    	Bringdown

    .. data:: xr_fail = 8

    	ENA failure

    .. data:: fdiag_run = 9

    	Running FDIAG

    .. data:: fdiag_fail = 10

    	FDIAG failure

    .. data:: power = 11

    	Powered

    .. data:: unpower = 12

    	Unpowered

    .. data:: mdr_warm_reload = 13

    	MDR warm reload

    .. data:: mdr_mbi_run = 14

    	MDR running MBI

    .. data:: maintenance_mode = 15

    	Maintenance mode

    .. data:: admin_down = 16

    	Admin down

    .. data:: not_monitor = 17

    	No MON

    .. data:: unknown_card = 18

    	Unknown

    .. data:: failed = 19

    	Failed

    .. data:: ok = 20

    	OK

    .. data:: missing = 21

    	Missing

    .. data:: diag_download = 22

    	Field diag downloading

    .. data:: diag_not_monitor = 23

    	Field diag unmonitor

    .. data:: fabric_diag_not_monitor = 24

    	Fabric field diag unmonitor

    .. data:: diag_rp_launch = 25

    	Field diag RP launching

    .. data:: diag_run = 26

    	Field diag running

    .. data:: diag_pass = 27

    	Field diag pass

    .. data:: diag_fail = 28

    	Field diag fail

    .. data:: diag_timeout = 29

    	Field diag timeout

    .. data:: disable = 30

    	Disable

    .. data:: spa_boot = 31

    	SPA booting

    .. data:: not_allowed_online = 32

    	Not allowed online

    .. data:: stop = 33

    	Stopped

    .. data:: incomp_version = 34

    	Incompatible FW version

    .. data:: fpd_hold = 35

    	FPD hold

    .. data:: xr_preparation = 36

    	XR preparation

    .. data:: sync_ready = 37

    	Sync ready state

    .. data:: xr_isolate = 38

    	Node isolate state

    .. data:: ready = 39

    	Ready

    .. data:: invalid = 40

    	Invalid

    .. data:: operational = 41

    	Operational

    .. data:: operational_lock = 42

    	Operational lock

    .. data:: going_down = 43

    	Going down

    .. data:: going_offline = 44

    	Going offline

    .. data:: going_online = 45

    	Going online

    .. data:: offline = 46

    	Offline

    .. data:: up = 47

    	Up

    .. data:: down = 48

    	Down

    .. data:: max = 49

    	Max

    .. data:: unknown = 50

    	Unknown

    """

    not_present = Enum.YLeaf(0, "not-present")

    present = Enum.YLeaf(1, "present")

    reset = Enum.YLeaf(2, "reset")

    rommon = Enum.YLeaf(3, "rommon")

    mbi_boot = Enum.YLeaf(4, "mbi-boot")

    mbi_run = Enum.YLeaf(5, "mbi-run")

    xr_run = Enum.YLeaf(6, "xr-run")

    bring_down = Enum.YLeaf(7, "bring-down")

    xr_fail = Enum.YLeaf(8, "xr-fail")

    fdiag_run = Enum.YLeaf(9, "fdiag-run")

    fdiag_fail = Enum.YLeaf(10, "fdiag-fail")

    power = Enum.YLeaf(11, "power")

    unpower = Enum.YLeaf(12, "unpower")

    mdr_warm_reload = Enum.YLeaf(13, "mdr-warm-reload")

    mdr_mbi_run = Enum.YLeaf(14, "mdr-mbi-run")

    maintenance_mode = Enum.YLeaf(15, "maintenance-mode")

    admin_down = Enum.YLeaf(16, "admin-down")

    not_monitor = Enum.YLeaf(17, "not-monitor")

    unknown_card = Enum.YLeaf(18, "unknown-card")

    failed = Enum.YLeaf(19, "failed")

    ok = Enum.YLeaf(20, "ok")

    missing = Enum.YLeaf(21, "missing")

    diag_download = Enum.YLeaf(22, "diag-download")

    diag_not_monitor = Enum.YLeaf(23, "diag-not-monitor")

    fabric_diag_not_monitor = Enum.YLeaf(24, "fabric-diag-not-monitor")

    diag_rp_launch = Enum.YLeaf(25, "diag-rp-launch")

    diag_run = Enum.YLeaf(26, "diag-run")

    diag_pass = Enum.YLeaf(27, "diag-pass")

    diag_fail = Enum.YLeaf(28, "diag-fail")

    diag_timeout = Enum.YLeaf(29, "diag-timeout")

    disable = Enum.YLeaf(30, "disable")

    spa_boot = Enum.YLeaf(31, "spa-boot")

    not_allowed_online = Enum.YLeaf(32, "not-allowed-online")

    stop = Enum.YLeaf(33, "stop")

    incomp_version = Enum.YLeaf(34, "incomp-version")

    fpd_hold = Enum.YLeaf(35, "fpd-hold")

    xr_preparation = Enum.YLeaf(36, "xr-preparation")

    sync_ready = Enum.YLeaf(37, "sync-ready")

    xr_isolate = Enum.YLeaf(38, "xr-isolate")

    ready = Enum.YLeaf(39, "ready")

    invalid = Enum.YLeaf(40, "invalid")

    operational = Enum.YLeaf(41, "operational")

    operational_lock = Enum.YLeaf(42, "operational-lock")

    going_down = Enum.YLeaf(43, "going-down")

    going_offline = Enum.YLeaf(44, "going-offline")

    going_online = Enum.YLeaf(45, "going-online")

    offline = Enum.YLeaf(46, "offline")

    up = Enum.YLeaf(47, "up")

    down = Enum.YLeaf(48, "down")

    # NOTE: "max" shadows the builtin inside this class namespace; this is
    # dictated by the generated YANG enum name and is left as generated.
    max = Enum.YLeaf(49, "max")

    unknown = Enum.YLeaf(50, "unknown")
class Platform(Entity):
    """
    Platform information (top-level container
    ``Cisco-IOS-XR-plat-chas-invmgr-ng-oper:platform``).

    .. attribute:: racks

    	Table of racks
    	**type**\: :py:class:`Racks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform.Racks>`

    """

    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        super(Platform, self).__init__()
        self._top_entity = None

        self.yang_name = "platform"
        self.yang_parent_name = "Cisco-IOS-XR-plat-chas-invmgr-ng-oper"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("racks", ("racks", Platform.Racks))])
        self._leafs = OrderedDict()

        self.racks = Platform.Racks()
        self.racks.parent = self
        self._children_name_map["racks"] = "racks"
        self._segment_path = lambda: "Cisco-IOS-XR-plat-chas-invmgr-ng-oper:platform"
        # Must be the last assignment: subsequent attribute sets go through
        # the frozen-attribute path in __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Platform, [], name, value)


    class Racks(Entity):
        """
        Table of racks

        .. attribute:: rack

        	Rack name
        	**type**\: list of :py:class:`Rack <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform.Racks.Rack>`

        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            super(Platform.Racks, self).__init__()

            self.yang_name = "racks"
            self.yang_parent_name = "platform"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("rack", ("rack", Platform.Racks.Rack))])
            self._leafs = OrderedDict()

            self.rack = YList(self)
            self._segment_path = lambda: "racks"
            self._absolute_path = lambda: "Cisco-IOS-XR-plat-chas-invmgr-ng-oper:platform/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Platform.Racks, [], name, value)


        class Rack(Entity):
            """
            Rack name

            .. attribute:: rack_name  (key)

            	Rack name
            	**type**\: str
            	**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

            .. attribute:: slots

            	Table of slots
            	**type**\: :py:class:`Slots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform.Racks.Rack.Slots>`

            """

            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                super(Platform.Racks.Rack, self).__init__()

                self.yang_name = "rack"
                self.yang_parent_name = "racks"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'rack_name' is the YANG list key for this entry.
                self.ylist_key_names = ['rack_name']
                self._child_classes = OrderedDict([("slots", ("slots", Platform.Racks.Rack.Slots))])
                self._leafs = OrderedDict([
                    ('rack_name', (YLeaf(YType.str, 'rack-name'), ['str'])),
                ])
                self.rack_name = None

                self.slots = Platform.Racks.Rack.Slots()
                self.slots.parent = self
                self._children_name_map["slots"] = "slots"
                self._segment_path = lambda: "rack" + "[rack-name='" + str(self.rack_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-plat-chas-invmgr-ng-oper:platform/racks/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Platform.Racks.Rack, ['rack_name'], name, value)


            class Slots(Entity):
                """
                Table of slots

                .. attribute:: slot

                	Slot name
                	**type**\: list of :py:class:`Slot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform.Racks.Rack.Slots.Slot>`

                """

                _prefix = 'plat-chas-invmgr-ng-oper'
                _revision = '2018-01-22'

                def __init__(self):
                    super(Platform.Racks.Rack.Slots, self).__init__()

                    self.yang_name = "slots"
                    self.yang_parent_name = "rack"
                    self.is_top_level_class = False
                    # Ancestor 'rack' is a keyed list, so no absolute path here.
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("slot", ("slot", Platform.Racks.Rack.Slots.Slot))])
                    self._leafs = OrderedDict()

                    self.slot = YList(self)
                    self._segment_path = lambda: "slots"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(Platform.Racks.Rack.Slots, [], name, value)


                class Slot(Entity):
                    """
                    Slot name

                    .. attribute:: slot_name  (key)

                    	Slot name
                    	**type**\: str
                    	**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

                    .. attribute:: instances

                    	Table of Instances
                    	**type**\: :py:class:`Instances <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform.Racks.Rack.Slots.Slot.Instances>`

                    .. attribute:: vm

                    	VM information
                    	**type**\: :py:class:`Vm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform.Racks.Rack.Slots.Slot.Vm>`

                    .. attribute:: state

                    	State information
                    	**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform.Racks.Rack.Slots.Slot.State>`

                    """

                    _prefix = 'plat-chas-invmgr-ng-oper'
                    _revision = '2018-01-22'

                    def __init__(self):
                        super(Platform.Racks.Rack.Slots.Slot, self).__init__()

                        self.yang_name = "slot"
                        self.yang_parent_name = "slots"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = ['slot_name']
                        self._child_classes = OrderedDict([("instances", ("instances", Platform.Racks.Rack.Slots.Slot.Instances)), ("vm", ("vm", Platform.Racks.Rack.Slots.Slot.Vm)), ("state", ("state", Platform.Racks.Rack.Slots.Slot.State))])
                        self._leafs = OrderedDict([
                            ('slot_name', (YLeaf(YType.str, 'slot-name'), ['str'])),
                        ])
                        self.slot_name = None

                        self.instances = Platform.Racks.Rack.Slots.Slot.Instances()
                        self.instances.parent = self
                        self._children_name_map["instances"] = "instances"

                        self.vm = Platform.Racks.Rack.Slots.Slot.Vm()
                        self.vm.parent = self
                        self._children_name_map["vm"] = "vm"

                        self.state = Platform.Racks.Rack.Slots.Slot.State()
                        self.state.parent = self
                        self._children_name_map["state"] = "state"
                        self._segment_path = lambda: "slot" + "[slot-name='" + str(self.slot_name) + "']"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(Platform.Racks.Rack.Slots.Slot, ['slot_name'], name, value)


                    class Instances(Entity):
                        """
                        Table of Instances

                        .. attribute:: instance

                        	Instance name
                        	**type**\: list of :py:class:`Instance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform.Racks.Rack.Slots.Slot.Instances.Instance>`

                        """

                        _prefix = 'plat-chas-invmgr-ng-oper'
                        _revision = '2018-01-22'

                        def __init__(self):
                            super(Platform.Racks.Rack.Slots.Slot.Instances, self).__init__()

                            self.yang_name = "instances"
                            self.yang_parent_name = "slot"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([("instance", ("instance", Platform.Racks.Rack.Slots.Slot.Instances.Instance))])
                            self._leafs = OrderedDict()

                            self.instance = YList(self)
                            self._segment_path = lambda: "instances"
                            self._is_frozen = True

                        def __setattr__(self, name, value):
                            self._perform_setattr(Platform.Racks.Rack.Slots.Slot.Instances, [], name, value)


                        class Instance(Entity):
                            """
                            Instance name

                            .. attribute:: instance_name  (key)

                            	Instance name
                            	**type**\: str

                            .. attribute:: state

                            	State information
                            	**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.Platform.Racks.Rack.Slots.Slot.Instances.Instance.State>`

                            """

                            _prefix = 'plat-chas-invmgr-ng-oper'
                            _revision = '2018-01-22'

                            def __init__(self):
                                super(Platform.Racks.Rack.Slots.Slot.Instances.Instance, self).__init__()

                                self.yang_name = "instance"
                                self.yang_parent_name = "instances"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = ['instance_name']
                                self._child_classes = OrderedDict([("state", ("state", Platform.Racks.Rack.Slots.Slot.Instances.Instance.State))])
                                self._leafs = OrderedDict([
                                    ('instance_name', (YLeaf(YType.str, 'instance-name'), ['str'])),
                                ])
                                self.instance_name = None

                                self.state = Platform.Racks.Rack.Slots.Slot.Instances.Instance.State()
                                self.state.parent = self
                                self._children_name_map["state"] = "state"
                                self._segment_path = lambda: "instance" + "[instance-name='" + str(self.instance_name) + "']"
                                self._is_frozen = True

                            def __setattr__(self, name, value):
                                self._perform_setattr(Platform.Racks.Rack.Slots.Slot.Instances.Instance, ['instance_name'], name, value)


                            class State(Entity):
                                """
                                State information

                                .. attribute:: card_type

                                	Card type
                                	**type**\: str

                                .. attribute:: card_redundancy_state

                                	Redundancy state
                                	**type**\: :py:class:`CardRedundancyState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.CardRedundancyState>`

                                .. attribute:: plim

                                	PLIM
                                	**type**\: str

                                .. attribute:: state

                                	State
                                	**type**\: :py:class:`NodeState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.NodeState>`

                                .. attribute:: is_monitored

                                	True if power state is active
                                	**type**\: bool

                                .. attribute:: is_powered

                                	True if monitor state is active
                                	**type**\: bool

                                .. attribute:: is_shutdown

                                	True if shutdown state is active
                                	**type**\: bool

                                .. attribute:: admin_state

                                	Admin state
                                	**type**\: str

                                NOTE(review): the descriptions of ``is_monitored`` and
                                ``is_powered`` above look swapped; they are reproduced
                                verbatim from the source YANG model — verify upstream.
                                """

                                _prefix = 'plat-chas-invmgr-ng-oper'
                                _revision = '2018-01-22'

                                def __init__(self):
                                    super(Platform.Racks.Rack.Slots.Slot.Instances.Instance.State, self).__init__()

                                    self.yang_name = "state"
                                    self.yang_parent_name = "instance"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('card_type', (YLeaf(YType.str, 'card-type'), ['str'])),
                                        ('card_redundancy_state', (YLeaf(YType.enumeration, 'card-redundancy-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'CardRedundancyState', '')])),
                                        ('plim', (YLeaf(YType.str, 'plim'), ['str'])),
                                        ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'NodeState', '')])),
                                        ('is_monitored', (YLeaf(YType.boolean, 'is-monitored'), ['bool'])),
                                        ('is_powered', (YLeaf(YType.boolean, 'is-powered'), ['bool'])),
                                        ('is_shutdown', (YLeaf(YType.boolean, 'is-shutdown'), ['bool'])),
                                        ('admin_state', (YLeaf(YType.str, 'admin-state'), ['str'])),
                                    ])
                                    self.card_type = None
                                    self.card_redundancy_state = None
                                    self.plim = None
                                    self.state = None
                                    self.is_monitored = None
                                    self.is_powered = None
                                    self.is_shutdown = None
                                    self.admin_state = None
                                    self._segment_path = lambda: "state"
                                    self._is_frozen = True

                                def __setattr__(self, name, value):
                                    self._perform_setattr(Platform.Racks.Rack.Slots.Slot.Instances.Instance.State, ['card_type', 'card_redundancy_state', 'plim', 'state', 'is_monitored', 'is_powered', 'is_shutdown', 'admin_state'], name, value)


                    class Vm(Entity):
                        """
                        VM information

                        .. attribute:: node_description

                        	Node Type
                        	**type**\: str

                        .. attribute:: red_role

                        	Node Redundency Role
                        	**type**\: str

                        .. attribute:: partner_name

                        	Partner Name
                        	**type**\: str

                        .. attribute:: software_status

                        	SW status
                        	**type**\: str

                        .. attribute:: node_ip

                        	Node IP Address
                        	**type**\: str

                        """

                        _prefix = 'plat-chas-invmgr-ng-oper'
                        _revision = '2018-01-22'

                        def __init__(self):
                            super(Platform.Racks.Rack.Slots.Slot.Vm, self).__init__()

                            self.yang_name = "vm"
                            self.yang_parent_name = "slot"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('node_description', (YLeaf(YType.str, 'node-description'), ['str'])),
                                ('red_role', (YLeaf(YType.str, 'red-role'), ['str'])),
                                ('partner_name', (YLeaf(YType.str, 'partner-name'), ['str'])),
                                ('software_status', (YLeaf(YType.str, 'software-status'), ['str'])),
                                ('node_ip', (YLeaf(YType.str, 'node-ip'), ['str'])),
                            ])
                            self.node_description = None
                            self.red_role = None
                            self.partner_name = None
                            self.software_status = None
                            self.node_ip = None
                            self._segment_path = lambda: "vm"
                            self._is_frozen = True

                        def __setattr__(self, name, value):
                            self._perform_setattr(Platform.Racks.Rack.Slots.Slot.Vm, ['node_description', 'red_role', 'partner_name', 'software_status', 'node_ip'], name, value)


                    class State(Entity):
                        """
                        State information

                        .. attribute:: card_type

                        	Card type
                        	**type**\: str

                        .. attribute:: card_redundancy_state

                        	Redundancy state
                        	**type**\: :py:class:`CardRedundancyState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.CardRedundancyState>`

                        .. attribute:: plim

                        	PLIM
                        	**type**\: str

                        .. attribute:: state

                        	State
                        	**type**\: :py:class:`NodeState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.NodeState>`

                        .. attribute:: is_monitored

                        	True if power state is active
                        	**type**\: bool

                        .. attribute:: is_powered

                        	True if monitor state is active
                        	**type**\: bool

                        .. attribute:: is_shutdown

                        	True if shutdown state is active
                        	**type**\: bool

                        .. attribute:: admin_state

                        	Admin state
                        	**type**\: str

                        NOTE(review): ``is_monitored``/``is_powered`` descriptions
                        appear swapped (verbatim from the source YANG) — verify.
                        """

                        _prefix = 'plat-chas-invmgr-ng-oper'
                        _revision = '2018-01-22'

                        def __init__(self):
                            super(Platform.Racks.Rack.Slots.Slot.State, self).__init__()

                            self.yang_name = "state"
                            self.yang_parent_name = "slot"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('card_type', (YLeaf(YType.str, 'card-type'), ['str'])),
                                ('card_redundancy_state', (YLeaf(YType.enumeration, 'card-redundancy-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'CardRedundancyState', '')])),
                                ('plim', (YLeaf(YType.str, 'plim'), ['str'])),
                                ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'NodeState', '')])),
                                ('is_monitored', (YLeaf(YType.boolean, 'is-monitored'), ['bool'])),
                                ('is_powered', (YLeaf(YType.boolean, 'is-powered'), ['bool'])),
                                ('is_shutdown', (YLeaf(YType.boolean, 'is-shutdown'), ['bool'])),
                                ('admin_state', (YLeaf(YType.str, 'admin-state'), ['str'])),
                            ])
                            self.card_type = None
                            self.card_redundancy_state = None
                            self.plim = None
                            self.state = None
                            self.is_monitored = None
                            self.is_powered = None
                            self.is_shutdown = None
                            self.admin_state = None
                            self._segment_path = lambda: "state"
                            self._is_frozen = True

                        def __setattr__(self, name, value):
                            self._perform_setattr(Platform.Racks.Rack.Slots.Slot.State, ['card_type', 'card_redundancy_state', 'plim', 'state', 'is_monitored', 'is_powered', 'is_shutdown', 'admin_state'], name, value)

    def clone_ptr(self):
        # Return a fresh top-level Platform entity; used by the YDK runtime
        # when it needs an empty copy of this model root.
        self._top_entity = Platform()
        return self._top_entity
class PlatformInventory(Entity):
"""
platform inventory
.. attribute:: racks
Table of racks
**type**\: :py:class:`Racks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build the PlatformInventory root entity and its 'racks' child container."""
    super(PlatformInventory, self).__init__()
    self._top_entity = None

    self.yang_name = "platform-inventory"
    self.yang_parent_name = "Cisco-IOS-XR-plat-chas-invmgr-ng-oper"
    self.is_top_level_class = True
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("racks", ("racks", PlatformInventory.Racks))])
    self._leafs = OrderedDict()

    self.racks = PlatformInventory.Racks()
    self.racks.parent = self
    self._children_name_map["racks"] = "racks"
    self._segment_path = lambda: "Cisco-IOS-XR-plat-chas-invmgr-ng-oper:platform-inventory"
    # Must remain the last assignment: later sets go through __setattr__'s
    # frozen-attribute handling.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute set through Entity._perform_setattr, which
    # presumably validates names once _is_frozen is True (generated pattern).
    self._perform_setattr(PlatformInventory, [], name, value)
class Racks(Entity):
"""
Table of racks
.. attribute:: rack
Rack name
**type**\: list of :py:class:`Rack <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build the 'racks' container; entries live in the keyed YList ``self.rack``."""
    super(PlatformInventory.Racks, self).__init__()

    self.yang_name = "racks"
    self.yang_parent_name = "platform-inventory"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("rack", ("rack", PlatformInventory.Racks.Rack))])
    self._leafs = OrderedDict()

    self.rack = YList(self)
    self._segment_path = lambda: "racks"
    self._absolute_path = lambda: "Cisco-IOS-XR-plat-chas-invmgr-ng-oper:platform-inventory/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    # Generated frozen-attribute hook; no YANG list keys at this level.
    self._perform_setattr(PlatformInventory.Racks, [], name, value)
class Rack(Entity):
"""
Rack name
.. attribute:: name (key)
Rack name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: slots
Table of slots
**type**\: :py:class:`Slots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build one 'rack' list entry, keyed by the 'name' leaf."""
    super(PlatformInventory.Racks.Rack, self).__init__()

    self.yang_name = "rack"
    self.yang_parent_name = "racks"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # 'name' is the YANG list key; it is embedded in the segment path below.
    self.ylist_key_names = ['name']
    self._child_classes = OrderedDict([("slots", ("slots", PlatformInventory.Racks.Rack.Slots)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None

    self.slots = PlatformInventory.Racks.Rack.Slots()
    self.slots.parent = self
    self._children_name_map["slots"] = "slots"

    self.attributes = PlatformInventory.Racks.Rack.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    self._segment_path = lambda: "rack" + "[name='" + str(self.name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-plat-chas-invmgr-ng-oper:platform-inventory/racks/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    # Generated frozen-attribute hook; 'name' is the settable list key.
    self._perform_setattr(PlatformInventory.Racks.Rack, ['name'], name, value)
class Slots(Entity):
"""
Table of slots
.. attribute:: slot
Slot name
**type**\: list of :py:class:`Slot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build the 'slots' container under a rack; entries in the YList ``self.slot``."""
    super(PlatformInventory.Racks.Rack.Slots, self).__init__()

    self.yang_name = "slots"
    self.yang_parent_name = "rack"
    self.is_top_level_class = False
    # A keyed 'rack' ancestor means no static absolute path is generated.
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("slot", ("slot", PlatformInventory.Racks.Rack.Slots.Slot))])
    self._leafs = OrderedDict()

    self.slot = YList(self)
    self._segment_path = lambda: "slots"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Generated frozen-attribute hook; no list keys at this level.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots, [], name, value)
class Slot(Entity):
"""
Slot name
.. attribute:: name (key)
Slot name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: cards
Table of cards
**type**\: :py:class:`Cards <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build one 'slot' list entry (key: 'name') with its cards/attributes children."""
    super(PlatformInventory.Racks.Rack.Slots.Slot, self).__init__()

    self.yang_name = "slot"
    self.yang_parent_name = "slots"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']
    self._child_classes = OrderedDict([("cards", ("cards", PlatformInventory.Racks.Rack.Slots.Slot.Cards)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None

    self.cards = PlatformInventory.Racks.Rack.Slots.Slot.Cards()
    self.cards.parent = self
    self._children_name_map["cards"] = "cards"

    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    self._segment_path = lambda: "slot" + "[name='" + str(self.name) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Generated frozen-attribute hook; 'name' is the settable list key.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot, ['name'], name, value)
class Cards(Entity):
"""
Table of cards
.. attribute:: card
Card number
**type**\: list of :py:class:`Card <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build the 'cards' container; entries live in the YList ``self.card``."""
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards, self).__init__()

    self.yang_name = "cards"
    self.yang_parent_name = "slot"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("card", ("card", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card))])
    self._leafs = OrderedDict()

    self.card = YList(self)
    self._segment_path = lambda: "cards"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Generated frozen-attribute hook; no list keys at this level.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards, [], name, value)
class Card(Entity):
"""
Card number
.. attribute:: name (key)
Card name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: hardware_information
HardwareInformationDir
**type**\: :py:class:`HardwareInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation>`
.. attribute:: sub_slots
Table of subslots
**type**\: :py:class:`SubSlots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots>`
.. attribute:: portses
Table of port slots
**type**\: :py:class:`Portses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses>`
.. attribute:: port_slots
Table of port slots
**type**\: :py:class:`PortSlots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots>`
.. attribute:: hw_components
Table of HW components
**type**\: :py:class:`HwComponents <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents>`
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build one 'card' list entry (key: 'name') and its seven child containers."""
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card, self).__init__()

    self.yang_name = "card"
    self.yang_parent_name = "cards"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']
    # Child container registry: YANG name -> (python attribute, class).
    self._child_classes = OrderedDict([("hardware-information", ("hardware_information", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation)), ("sub-slots", ("sub_slots", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots)), ("portses", ("portses", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses)), ("port-slots", ("port_slots", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots)), ("hw-components", ("hw_components", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents)), ("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None

    self.hardware_information = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation()
    self.hardware_information.parent = self
    self._children_name_map["hardware_information"] = "hardware-information"

    self.sub_slots = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots()
    self.sub_slots.parent = self
    self._children_name_map["sub_slots"] = "sub-slots"

    self.portses = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses()
    self.portses.parent = self
    self._children_name_map["portses"] = "portses"

    self.port_slots = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots()
    self.port_slots.parent = self
    self._children_name_map["port_slots"] = "port-slots"

    self.hw_components = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents()
    self.hw_components.parent = self
    self._children_name_map["hw_components"] = "hw-components"

    self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors()
    self.sensors.parent = self
    self._children_name_map["sensors"] = "sensors"

    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    self._segment_path = lambda: "card" + "[name='" + str(self.name) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Generated frozen-attribute hook; 'name' is the settable list key.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card, ['name'], name, value)
class HardwareInformation(Entity):
"""
HardwareInformationDir
.. attribute:: processor_information
ProcesorInformation
**type**\: :py:class:`ProcessorInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.ProcessorInformation>`
.. attribute:: motherboard_information
MotherboardInformation
**type**\: :py:class:`MotherboardInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation>`
.. attribute:: bootflash_information
BootflashInformation
**type**\: :py:class:`BootflashInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.BootflashInformation>`
.. attribute:: disk_information
DiskInformation
**type**\: :py:class:`DiskInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.DiskInformation>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build the 'hardware-information' container with its four info children."""
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation, self).__init__()

    self.yang_name = "hardware-information"
    self.yang_parent_name = "card"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("processor-information", ("processor_information", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.ProcessorInformation)), ("motherboard-information", ("motherboard_information", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation)), ("bootflash-information", ("bootflash_information", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.BootflashInformation)), ("disk-information", ("disk_information", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.DiskInformation))])
    self._leafs = OrderedDict()

    self.processor_information = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.ProcessorInformation()
    self.processor_information.parent = self
    self._children_name_map["processor_information"] = "processor-information"

    self.motherboard_information = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation()
    self.motherboard_information.parent = self
    self._children_name_map["motherboard_information"] = "motherboard-information"

    self.bootflash_information = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.BootflashInformation()
    self.bootflash_information.parent = self
    self._children_name_map["bootflash_information"] = "bootflash-information"

    self.disk_information = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.DiskInformation()
    self.disk_information.parent = self
    self._children_name_map["disk_information"] = "disk-information"
    self._segment_path = lambda: "hardware-information"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Generated frozen-attribute hook; no list keys at this level.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation, [], name, value)
class ProcessorInformation(Entity):
    """
    ProcessorInformation

    YANG container "processor-information" under "hardware-information"
    (read-only operational data, three string leafs).

    .. attribute:: processor_type
        Type e.g. 7457
        **type**\: str
        **length:** 0..255
    .. attribute:: speed
        Speed e.g. 1197Mhz
        **type**\: str
        **length:** 0..255
    .. attribute:: revision
        Revision. e.g 1.1
        **type**\: str
        **length:** 0..255
    """

    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        # Set up YDK Entity bookkeeping for this leaf-only container.
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.ProcessorInformation, self).__init__()
        self.yang_name = "processor-information"
        self.yang_parent_name = "hardware-information"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list entry
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # no child containers/lists
        # Leaf attr name -> (YLeaf descriptor, accepted Python type names).
        self._leafs = OrderedDict([
            ('processor_type', (YLeaf(YType.str, 'processor-type'), ['str'])),
            ('speed', (YLeaf(YType.str, 'speed'), ['str'])),
            ('revision', (YLeaf(YType.str, 'revision'), ['str'])),
        ])
        self.processor_type = None
        self.speed = None
        self.revision = None
        self._segment_path = lambda: "processor-information"
        # Freeze flag is the last assignment; read by the Entity base machinery.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route writes through YDK validation with this class's leaf names.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.ProcessorInformation, [u'processor_type', u'speed', u'revision'], name, value)
class MotherboardInformation(Entity):
    """
    MotherboardInformation

    YANG container "motherboard-information" under "hardware-information":
    two size leafs plus three child containers (rom, bootflash, processor).

    .. attribute:: rom
        ROM information
        **type**\: :py:class:`Rom <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Rom>`
    .. attribute:: bootflash
        Bootflash information
        **type**\: :py:class:`Bootflash <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Bootflash>`
    .. attribute:: processor
        Processor information
        **type**\: :py:class:`Processor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Processor>`
    .. attribute:: main_memory_size
        Memory size in bytes
        **type**\: int
        **range:** 0..18446744073709551615
        **units**\: byte
    .. attribute:: nvram_size
        NVRAM size in bytes
        **type**\: int
        **range:** 0..18446744073709551615
        **units**\: byte
    """

    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        # Build Entity bookkeeping, then instantiate the three child containers.
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation, self).__init__()
        self.yang_name = "motherboard-information"
        self.yang_parent_name = "hardware-information"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list entry
        self.ylist_key_names = []
        # YANG child name -> (python attribute name, child class).
        self._child_classes = OrderedDict([("rom", ("rom", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Rom)), ("bootflash", ("bootflash", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Bootflash)), ("processor", ("processor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Processor))])
        # 64-bit unsigned sizes are surfaced as plain Python ints.
        self._leafs = OrderedDict([
            ('main_memory_size', (YLeaf(YType.uint64, 'main-memory-size'), ['int'])),
            ('nvram_size', (YLeaf(YType.uint64, 'nvram-size'), ['int'])),
        ])
        self.main_memory_size = None
        self.nvram_size = None
        # Eagerly construct each child and wire its parent back-pointer.
        self.rom = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Rom()
        self.rom.parent = self
        self._children_name_map["rom"] = "rom"
        self.bootflash = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Bootflash()
        self.bootflash.parent = self
        self._children_name_map["bootflash"] = "bootflash"
        self.processor = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Processor()
        self.processor.parent = self
        self._children_name_map["processor"] = "processor"
        self._segment_path = lambda: "motherboard-information"
        # Freeze flag is the last assignment; read by the Entity base machinery.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route writes through YDK validation with this class's leaf names.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation, [u'main_memory_size', u'nvram_size'], name, value)

    class Rom(Entity):
        """
        ROM information

        Leaf-only YANG container "rom" under "motherboard-information".

        .. attribute:: image_name
            Image name
            **type**\: str
            **length:** 0..255
        .. attribute:: major_version
            Major version
            **type**\: int
            **range:** 0..4294967295
        .. attribute:: minor_version
            Minor version
            **type**\: int
            **range:** 0..4294967295
        .. attribute:: micro_image_version
            Micro image version
            **type**\: str
            **length:** 0..255
        .. attribute:: platform_specific
            Platform specific text
            **type**\: str
            **length:** 0..255
        .. attribute:: release_type
            Release type
            **type**\: str
            **length:** 0..255
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            # Leaf-only container: no children, six leafs.
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Rom, self).__init__()
            self.yang_name = "rom"
            self.yang_parent_name = "motherboard-information"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('image_name', (YLeaf(YType.str, 'image-name'), ['str'])),
                ('major_version', (YLeaf(YType.uint32, 'major-version'), ['int'])),
                ('minor_version', (YLeaf(YType.uint32, 'minor-version'), ['int'])),
                ('micro_image_version', (YLeaf(YType.str, 'micro-image-version'), ['str'])),
                ('platform_specific', (YLeaf(YType.str, 'platform-specific'), ['str'])),
                ('release_type', (YLeaf(YType.str, 'release-type'), ['str'])),
            ])
            self.image_name = None
            self.major_version = None
            self.minor_version = None
            self.micro_image_version = None
            self.platform_specific = None
            self.release_type = None
            self._segment_path = lambda: "rom"
            # Freeze flag is the last assignment; read by the Entity base machinery.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route writes through YDK validation with this class's leaf names.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Rom, [u'image_name', u'major_version', u'minor_version', u'micro_image_version', u'platform_specific', u'release_type'], name, value)

    class Bootflash(Entity):
        """
        Bootflash information

        Leaf-only YANG container "bootflash" under "motherboard-information".

        .. attribute:: image_name
            Image name
            **type**\: str
            **length:** 0..255
        .. attribute:: platform_type
            Platform Type
            **type**\: str
            **length:** 0..255
        .. attribute:: major_version
            Major version
            **type**\: int
            **range:** 0..4294967295
        .. attribute:: minor_version
            Minor version
            **type**\: int
            **range:** 0..4294967295
        .. attribute:: micro_image_version
            Micro image version
            **type**\: str
            **length:** 0..255
        .. attribute:: platform_specific
            Platform specific text
            **type**\: str
            **length:** 0..255
        .. attribute:: release_type
            Release type
            **type**\: str
            **length:** 0..255
        .. attribute:: bootflash_type
            Bootflash type e.g. SIMM
            **type**\: str
            **length:** 0..255
        .. attribute:: bootflash_size
            Bootflash size in kilo\-bytes
            **type**\: int
            **range:** 0..4294967295
            **units**\: kilobyte
        .. attribute:: sector_size
            Sector size in bytes
            **type**\: int
            **range:** 0..4294967295
            **units**\: byte
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            # Leaf-only container: no children, ten leafs.
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Bootflash, self).__init__()
            self.yang_name = "bootflash"
            self.yang_parent_name = "motherboard-information"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('image_name', (YLeaf(YType.str, 'image-name'), ['str'])),
                ('platform_type', (YLeaf(YType.str, 'platform-type'), ['str'])),
                ('major_version', (YLeaf(YType.uint32, 'major-version'), ['int'])),
                ('minor_version', (YLeaf(YType.uint32, 'minor-version'), ['int'])),
                ('micro_image_version', (YLeaf(YType.str, 'micro-image-version'), ['str'])),
                ('platform_specific', (YLeaf(YType.str, 'platform-specific'), ['str'])),
                ('release_type', (YLeaf(YType.str, 'release-type'), ['str'])),
                ('bootflash_type', (YLeaf(YType.str, 'bootflash-type'), ['str'])),
                ('bootflash_size', (YLeaf(YType.uint32, 'bootflash-size'), ['int'])),
                ('sector_size', (YLeaf(YType.uint32, 'sector-size'), ['int'])),
            ])
            self.image_name = None
            self.platform_type = None
            self.major_version = None
            self.minor_version = None
            self.micro_image_version = None
            self.platform_specific = None
            self.release_type = None
            self.bootflash_type = None
            self.bootflash_size = None
            self.sector_size = None
            self._segment_path = lambda: "bootflash"
            # Freeze flag is the last assignment; read by the Entity base machinery.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route writes through YDK validation with this class's leaf names.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Bootflash, [u'image_name', u'platform_type', u'major_version', u'minor_version', u'micro_image_version', u'platform_specific', u'release_type', u'bootflash_type', u'bootflash_size', u'sector_size'], name, value)

    class Processor(Entity):
        """
        Processor information

        Leaf-only YANG container "processor" under "motherboard-information".

        .. attribute:: processor_type
            Type e.g. 7457
            **type**\: str
            **length:** 0..255
        .. attribute:: speed
            Speed e.g. 1197Mhz
            **type**\: str
            **length:** 0..255
        .. attribute:: revision
            Revision. e.g 1.1
            **type**\: str
            **length:** 0..255
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            # Leaf-only container: no children, three string leafs.
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Processor, self).__init__()
            self.yang_name = "processor"
            self.yang_parent_name = "motherboard-information"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('processor_type', (YLeaf(YType.str, 'processor-type'), ['str'])),
                ('speed', (YLeaf(YType.str, 'speed'), ['str'])),
                ('revision', (YLeaf(YType.str, 'revision'), ['str'])),
            ])
            self.processor_type = None
            self.speed = None
            self.revision = None
            self._segment_path = lambda: "processor"
            # Freeze flag is the last assignment; read by the Entity base machinery.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route writes through YDK validation with this class's leaf names.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.MotherboardInformation.Processor, [u'processor_type', u'speed', u'revision'], name, value)
class BootflashInformation(Entity):
    """
    BootflashInformation

    Leaf-only YANG container "bootflash-information" under
    "hardware-information" (same leaf set as MotherboardInformation.Bootflash).

    .. attribute:: image_name
        Image name
        **type**\: str
        **length:** 0..255
    .. attribute:: platform_type
        Platform Type
        **type**\: str
        **length:** 0..255
    .. attribute:: major_version
        Major version
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: minor_version
        Minor version
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: micro_image_version
        Micro image version
        **type**\: str
        **length:** 0..255
    .. attribute:: platform_specific
        Platform specific text
        **type**\: str
        **length:** 0..255
    .. attribute:: release_type
        Release type
        **type**\: str
        **length:** 0..255
    .. attribute:: bootflash_type
        Bootflash type e.g. SIMM
        **type**\: str
        **length:** 0..255
    .. attribute:: bootflash_size
        Bootflash size in kilo\-bytes
        **type**\: int
        **range:** 0..4294967295
        **units**\: kilobyte
    .. attribute:: sector_size
        Sector size in bytes
        **type**\: int
        **range:** 0..4294967295
        **units**\: byte
    """

    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        # Leaf-only container: no children, ten leafs.
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.BootflashInformation, self).__init__()
        self.yang_name = "bootflash-information"
        self.yang_parent_name = "hardware-information"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list entry
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf attr name -> (YLeaf descriptor, accepted Python type names).
        self._leafs = OrderedDict([
            ('image_name', (YLeaf(YType.str, 'image-name'), ['str'])),
            ('platform_type', (YLeaf(YType.str, 'platform-type'), ['str'])),
            ('major_version', (YLeaf(YType.uint32, 'major-version'), ['int'])),
            ('minor_version', (YLeaf(YType.uint32, 'minor-version'), ['int'])),
            ('micro_image_version', (YLeaf(YType.str, 'micro-image-version'), ['str'])),
            ('platform_specific', (YLeaf(YType.str, 'platform-specific'), ['str'])),
            ('release_type', (YLeaf(YType.str, 'release-type'), ['str'])),
            ('bootflash_type', (YLeaf(YType.str, 'bootflash-type'), ['str'])),
            ('bootflash_size', (YLeaf(YType.uint32, 'bootflash-size'), ['int'])),
            ('sector_size', (YLeaf(YType.uint32, 'sector-size'), ['int'])),
        ])
        self.image_name = None
        self.platform_type = None
        self.major_version = None
        self.minor_version = None
        self.micro_image_version = None
        self.platform_specific = None
        self.release_type = None
        self.bootflash_type = None
        self.bootflash_size = None
        self.sector_size = None
        self._segment_path = lambda: "bootflash-information"
        # Freeze flag is the last assignment; read by the Entity base machinery.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route writes through YDK validation with this class's leaf names.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.BootflashInformation, [u'image_name', u'platform_type', u'major_version', u'minor_version', u'micro_image_version', u'platform_specific', u'release_type', u'bootflash_type', u'bootflash_size', u'sector_size'], name, value)
class DiskInformation(Entity):
    """
    DiskInformation

    YANG container "disk-information" under "hardware-information".
    Carries three deprecated single-disk leafs plus the "disks" list
    that supersedes them.

    .. attribute:: disk_name
        (Deprecated) Disk name
        **type**\: str
        **length:** 0..255
    .. attribute:: disk_size
        (Deprecated) Disk size in mega\-bytes
        **type**\: int
        **range:** 0..4294967295
        **units**\: megabyte
    .. attribute:: sector_size
        (Deprecated) Disk sector size in bytes
        **type**\: int
        **range:** 0..4294967295
        **units**\: byte
    .. attribute:: disks
        Disk attributes
        **type**\: list of :py:class:`Disks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.DiskInformation.Disks>`
    """

    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        # Entity bookkeeping plus the YList that holds "disks" entries.
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.DiskInformation, self).__init__()
        self.yang_name = "disk-information"
        self.yang_parent_name = "hardware-information"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list entry
        self.ylist_key_names = []
        # YANG child name -> (python attribute name, child class).
        self._child_classes = OrderedDict([("disks", ("disks", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.DiskInformation.Disks))])
        self._leafs = OrderedDict([
            ('disk_name', (YLeaf(YType.str, 'disk-name'), ['str'])),
            ('disk_size', (YLeaf(YType.uint32, 'disk-size'), ['int'])),
            ('sector_size', (YLeaf(YType.uint32, 'sector-size'), ['int'])),
        ])
        self.disk_name = None
        self.disk_size = None
        self.sector_size = None
        self.disks = YList(self)  # container for Disks list entries
        self._segment_path = lambda: "disk-information"
        # Freeze flag is the last assignment; read by the Entity base machinery.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route writes through YDK validation with this class's leaf names.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.DiskInformation, [u'disk_name', u'disk_size', u'sector_size'], name, value)

    class Disks(Entity):
        """
        Disk attributes

        Entry of the "disks" YANG list; no key leaf is declared here
        (ylist_key_names is empty).

        .. attribute:: disk_name
            Disk name
            **type**\: str
            **length:** 0..255
        .. attribute:: disk_size
            Disk size in mega\-bytes
            **type**\: int
            **range:** 0..4294967295
            **units**\: megabyte
        .. attribute:: sector_size
            Disk sector size in bytes
            **type**\: int
            **range:** 0..4294967295
            **units**\: byte
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            # Leaf-only list entry: no children, three leafs.
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.DiskInformation.Disks, self).__init__()
            self.yang_name = "disks"
            self.yang_parent_name = "disk-information"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('disk_name', (YLeaf(YType.str, 'disk-name'), ['str'])),
                ('disk_size', (YLeaf(YType.uint32, 'disk-size'), ['int'])),
                ('sector_size', (YLeaf(YType.uint32, 'sector-size'), ['int'])),
            ])
            self.disk_name = None
            self.disk_size = None
            self.sector_size = None
            self._segment_path = lambda: "disks"
            # Freeze flag is the last assignment; read by the Entity base machinery.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route writes through YDK validation with this class's leaf names.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HardwareInformation.DiskInformation.Disks, [u'disk_name', u'disk_size', u'sector_size'], name, value)
class SubSlots(Entity):
"""
Table of subslots
.. attribute:: sub_slot
Subslot number
**type**\: list of :py:class:`SubSlot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Build the YDK entity for the "sub-slots" YANG container (list wrapper).
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots, self).__init__()
    self.yang_name = "sub-slots"
    self.yang_parent_name = "card"
    self.is_top_level_class = False
    self.has_list_ancestor = True  # an ancestor node is a keyed YANG list entry
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("sub-slot", ("sub_slot", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot))])
    self._leafs = OrderedDict()  # no leafs of its own
    self.sub_slot = YList(self)  # container for SubSlot list entries
    self._segment_path = lambda: "sub-slots"
    # Freeze flag is the last assignment; read by the Entity base machinery.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Funnel every attribute write through YDK validation for this node."""
    # This wrapper container declares no YANG leafs.
    leaf_names = []
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots, leaf_names, name, value)
class SubSlot(Entity):
"""
Subslot number
.. attribute:: name (key)
Subslot name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: module
Module of a subslot
**type**\: :py:class:`Module <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Build the YDK entity for a "sub-slot" YANG list entry keyed by "name".
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot, self).__init__()
    self.yang_name = "sub-slot"
    self.yang_parent_name = "sub-slots"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']  # YANG list key
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("module", ("module", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None
    # Eagerly construct child containers and wire parent back-pointers.
    self.module = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module()
    self.module.parent = self
    self._children_name_map["module"] = "module"
    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    # Segment path embeds the current key value at evaluation time.
    self._segment_path = lambda: "sub-slot" + "[name='" + str(self.name) + "']"
    # Freeze flag is the last assignment; read by the Entity base machinery.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Funnel every attribute write through YDK validation for this node."""
    # 'name' is the single YANG leaf (the list key) of this entry.
    leaf_names = ['name']
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot, leaf_names, name, value)
class Module(Entity):
"""
Module of a subslot
.. attribute:: port_slots
Table of port slots
**type**\: :py:class:`PortSlots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots>`
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Build the YDK entity for the "module" YANG container of a sub-slot.
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module, self).__init__()
    self.yang_name = "module"
    self.yang_parent_name = "sub-slot"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("port-slots", ("port_slots", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots)), ("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes))])
    self._leafs = OrderedDict()  # no leafs of its own
    # Eagerly construct child containers and wire parent back-pointers.
    self.port_slots = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots()
    self.port_slots.parent = self
    self._children_name_map["port_slots"] = "port-slots"
    self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors()
    self.sensors.parent = self
    self._children_name_map["sensors"] = "sensors"
    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    self._segment_path = lambda: "module"
    # Freeze flag is the last assignment; read by the Entity base machinery.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Funnel every attribute write through YDK validation for this node."""
    # This container declares no YANG leafs.
    leaf_names = []
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module, leaf_names, name, value)
class PortSlots(Entity):
"""
Table of port slots
.. attribute:: port_slot
Port slot number
**type**\: list of :py:class:`PortSlot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Build the YDK entity for the "port-slots" YANG container (list wrapper).
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots, self).__init__()
    self.yang_name = "port-slots"
    self.yang_parent_name = "module"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("port-slot", ("port_slot", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot))])
    self._leafs = OrderedDict()  # no leafs of its own
    self.port_slot = YList(self)  # container for PortSlot list entries
    self._segment_path = lambda: "port-slots"
    # Freeze flag is the last assignment; read by the Entity base machinery.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Funnel every attribute write through YDK validation for this node."""
    # This wrapper container declares no YANG leafs.
    leaf_names = []
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots, leaf_names, name, value)
class PortSlot(Entity):
"""
Port slot number
.. attribute:: name (key)
Port slot name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: portses
Table of port slots
**type**\: :py:class:`Portses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses>`
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Build the YDK entity for a "port-slot" YANG list entry keyed by "name".
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot, self).__init__()
    self.yang_name = "port-slot"
    self.yang_parent_name = "port-slots"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']  # YANG list key
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("portses", ("portses", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses)), ("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None
    # Eagerly construct child containers and wire parent back-pointers.
    self.portses = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses()
    self.portses.parent = self
    self._children_name_map["portses"] = "portses"
    self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors()
    self.sensors.parent = self
    self._children_name_map["sensors"] = "sensors"
    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    # Segment path embeds the current key value at evaluation time.
    self._segment_path = lambda: "port-slot" + "[name='" + str(self.name) + "']"
    # Freeze flag is the last assignment; read by the Entity base machinery.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Funnel every attribute write through YDK validation for this node."""
    # 'name' is the single YANG leaf (the list key) of this entry.
    leaf_names = ['name']
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot, leaf_names, name, value)
class Portses(Entity):
"""
Table of port slots
.. attribute:: ports
Port number
**type**\: list of :py:class:`Ports <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Build the YDK entity for the "portses" YANG container (list wrapper).
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses, self).__init__()
    self.yang_name = "portses"
    self.yang_parent_name = "port-slot"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("ports", ("ports", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports))])
    self._leafs = OrderedDict()  # no leafs of its own
    self.ports = YList(self)  # container for Ports list entries
    self._segment_path = lambda: "portses"
    # Freeze flag is the last assignment; read by the Entity base machinery.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Funnel every attribute write through YDK validation for this node."""
    # This wrapper container declares no YANG leafs.
    leaf_names = []
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses, leaf_names, name, value)
class Ports(Entity):
"""
Port number
.. attribute:: name (key)
Port name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: hw_components
Table of HW components
**type**\: :py:class:`HwComponents <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents>`
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Build the YDK entity for a "ports" YANG list entry keyed by "name".
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports, self).__init__()
    self.yang_name = "ports"
    self.yang_parent_name = "portses"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']  # YANG list key
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("hw-components", ("hw_components", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents)), ("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None
    # Eagerly construct child containers and wire parent back-pointers.
    self.hw_components = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents()
    self.hw_components.parent = self
    self._children_name_map["hw_components"] = "hw-components"
    self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors()
    self.sensors.parent = self
    self._children_name_map["sensors"] = "sensors"
    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    # Segment path embeds the current key value at evaluation time.
    self._segment_path = lambda: "ports" + "[name='" + str(self.name) + "']"
    # Freeze flag is the last assignment; read by the Entity base machinery.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Funnel every attribute write through YDK validation for this node."""
    # 'name' is the single YANG leaf (the list key) of this entry.
    leaf_names = ['name']
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports, leaf_names, name, value)
class HwComponents(Entity):
"""
Table of HW components
.. attribute:: hw_component
HW component number
**type**\: list of :py:class:`HwComponent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Build the YDK entity for the "hw-components" YANG container (list wrapper).
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents, self).__init__()
    self.yang_name = "hw-components"
    self.yang_parent_name = "ports"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("hw-component", ("hw_component", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent))])
    self._leafs = OrderedDict()  # no leafs of its own
    self.hw_component = YList(self)  # container for HwComponent list entries
    self._segment_path = lambda: "hw-components"
    # Freeze flag is the last assignment; read by the Entity base machinery.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Funnel every attribute write through YDK validation for this node."""
    # This wrapper container declares no YANG leafs.
    leaf_names = []
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents, leaf_names, name, value)
class HwComponent(Entity):
"""
HW component number
.. attribute:: name (key)
HW component name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Build the YDK entity for a "hw-component" YANG list entry keyed by "name".
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent, self).__init__()
    self.yang_name = "hw-component"
    self.yang_parent_name = "hw-components"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']  # YANG list key
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None
    # Eagerly construct child containers and wire parent back-pointers.
    self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors()
    self.sensors.parent = self
    self._children_name_map["sensors"] = "sensors"
    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    # Segment path embeds the current key value at evaluation time.
    self._segment_path = lambda: "hw-component" + "[name='" + str(self.name) + "']"
    # Freeze flag is the last assignment; read by the Entity base machinery.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Funnel every attribute write through YDK validation for this node."""
    # 'name' is the single YANG leaf (the list key) of this entry.
    leaf_names = ['name']
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent, leaf_names, name, value)
# Generated binding for the 'sensors' container under 'hw-component'.
# Holds a keyless YList of Sensor entries; all YANG metadata is fixed in
# __init__ and the instance is frozen afterwards.
class Sensors(Entity):
"""
Table of sensors
.. attribute:: sensor
Sensor number
**type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors, self).__init__()
self.yang_name = "sensors"
self.yang_parent_name = "hw-component"
self.is_top_level_class = False
self.has_list_ancestor = True
# Plain container: no list keys at this level.
self.ylist_key_names = []
self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor))])
# No leaves of its own; children only.
self._leafs = OrderedDict()
# YList of Sensor entries (each keyed by its 'name' leaf).
self.sensor = YList(self)
self._segment_path = lambda: "sensors"
self._is_frozen = True
def __setattr__(self, name, value):
# No leaf names to validate here; still route through the YDK hook.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors, [], name, value)
# One 'sensor' list entry, keyed by 'name', with an 'attributes' child.
class Sensor(Entity):
"""
Sensor number
.. attribute:: name (key)
Sensor name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor, self).__init__()
self.yang_name = "sensor"
self.yang_parent_name = "sensors"
self.is_top_level_class = False
self.has_list_ancestor = True
# 'name' is the YANG list key for a sensor entry.
self.ylist_key_names = ['name']
self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
])
self.name = None
self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes()
self.attributes.parent = self
self._children_name_map["attributes"] = "attributes"
# Key value is embedded in the segment path, computed lazily.
self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor, ['name'], name, value)
# 'attributes' container of a sensor: basic-info + fru-info children.
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes, self).__init__()
self.yang_name = "attributes"
self.yang_parent_name = "sensor"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Note: YANG names use hyphens ("basic-info"); python attrs use underscores.
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo))])
self._leafs = OrderedDict()
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
self._segment_path = lambda: "attributes"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes, [], name, value)
# Leaf-only 'basic-info' container: identity/inventory strings and FRU flag.
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# No child containers: leaves only.
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
# Leaf values default to None (unset) until read from / written to a device.
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# 'fru-info' container: administrative/operational state enums plus two
# timestamp child containers.
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])
# Enumeration leaves resolve their enum classes from the module at runtime.
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
self._segment_path = lambda: "fru-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
# Timestamp container: seconds + nanoseconds pair (int32 leaves).
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Timestamp container: module uptime as seconds + nanoseconds (int32 leaves).
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Generated binding for the 'attributes' container directly under
# 'hw-component' (mirrors the per-sensor Attributes structure above):
# basic-info + fru-info children, frozen after construction.
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes, self).__init__()
self.yang_name = "attributes"
self.yang_parent_name = "hw-component"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# YANG names use hyphens ("basic-info"); python attributes use underscores.
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo))])
self._leafs = OrderedDict()
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
self._segment_path = lambda: "attributes"
self._is_frozen = True
def __setattr__(self, name, value):
# Route attribute writes through the YDK frozen-entity machinery.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes, [], name, value)
# Leaf-only 'basic-info' container: identity/inventory strings and FRU flag.
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# No child containers: leaves only.
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
# Leaf values default to None (unset) until populated.
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# 'fru-info' container: state enums plus two timestamp child containers.
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime))])
# Enumeration leaves resolve their enum classes from the module at runtime.
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
self._segment_path = lambda: "fru-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
# Timestamp container: seconds + nanoseconds pair (int32 leaves).
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Timestamp container: module uptime as seconds + nanoseconds (int32 leaves).
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Sensors(Entity):
"""
Table of sensors
.. attribute:: sensor
Sensor number
**type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors, self).__init__()
self.yang_name = "sensors"
self.yang_parent_name = "ports"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor))])
self._leafs = OrderedDict()
self.sensor = YList(self)
self._segment_path = lambda: "sensors"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors, [], name, value)
class Sensor(Entity):
"""
Sensor number
.. attribute:: name (key)
Sensor name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor, self).__init__()
self.yang_name = "sensor"
self.yang_parent_name = "sensors"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['name']
self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
])
self.name = None
self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes()
self.attributes.parent = self
self._children_name_map["attributes"] = "attributes"
self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor, ['name'], name, value)
class Attributes(Entity):
    """
    Attributes

    .. attribute:: basic_info

        Entity attributes
        **type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo>`

    .. attribute:: fru_info

        Field Replaceable Unit (FRU) attributes
        **type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo>`
    """

    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Build the frozen 'attributes' container under a sensor entry."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes, self).__init__()

        self.yang_name = "attributes"
        self.yang_parent_name = "sensor"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # YANG child name -> (python attribute name, child class).
        self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo))])
        self._leafs = OrderedDict()

        # Both child containers are instantiated eagerly and linked back.
        self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo()
        self.basic_info.parent = self
        self._children_name_map["basic_info"] = "basic-info"

        self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo()
        self.fru_info.parent = self
        self._children_name_map["fru_info"] = "fru-info"

        self._segment_path = lambda: "attributes"
        # Assigned last; presumably freezes the instance -- see Entity base.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No leaf names here: this container holds only child containers.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes, [], name, value)

    class BasicInfo(Entity):
        """
        Entity attributes

        .. attribute:: name

            name string for the entity
            **type**\: str
            **length:** 0..255

        .. attribute:: description

            describes in user\-readable terms what the entity in question does
            **type**\: str
            **length:** 0..255

        .. attribute:: model_name

            model name
            **type**\: str
            **length:** 0..255

        .. attribute:: hardware_revision

            hw revision string
            **type**\: str
            **length:** 0..255

        .. attribute:: serial_number

            serial number
            **type**\: str
            **length:** 0..255

        .. attribute:: firmware_revision

            firmware revision string
            **type**\: str
            **length:** 0..255

        .. attribute:: software_revision

            software revision string
            **type**\: str
            **length:** 0..255

        .. attribute:: vendor_type

            maps to the vendor OID string
            **type**\: str
            **length:** 0..255

        .. attribute:: is_field_replaceable_unit

            1 if Field Replaceable Unit 0, if not
            **type**\: bool
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Build the frozen 'basic-info' leaf container (all leafs optional)."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo, self).__init__()

            self.yang_name = "basic-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # python attr -> (YLeaf(yang-type, yang-name), [python types]).
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('description', (YLeaf(YType.str, 'description'), ['str'])),
                ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
                ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
                ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
                ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
                ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
                ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
                ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
            ])
            self.name = None
            self.description = None
            self.model_name = None
            self.hardware_revision = None
            self.serial_number = None
            self.firmware_revision = None
            self.software_revision = None
            self.vendor_type = None
            self.is_field_replaceable_unit = None
            self._segment_path = lambda: "basic-info"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)

    class FruInfo(Entity):
        """
        Field Replaceable Unit (FRU) attributes

        .. attribute:: last_operational_state_change

            Time operational state is last changed
            **type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`

        .. attribute:: module_up_time

            Module up time
            **type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`

        .. attribute:: module_administrative_state

            Administrative state
            **type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`

        .. attribute:: module_power_administrative_state

            Power administrative state
            **type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`

        .. attribute:: module_operational_state

            Operation state
            **type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`

        .. attribute:: module_monitor_state

            Monitor state
            **type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`

        .. attribute:: module_reset_reason

            Reset reason
            **type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Build the frozen 'fru-info' container (enum leafs + two time children)."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo, self).__init__()

            self.yang_name = "fru-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])
            # Enumeration leafs resolve their enum class from (module, name, '').
            self._leafs = OrderedDict([
                ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
                ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
                ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
                ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
                ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
            ])
            self.module_administrative_state = None
            self.module_power_administrative_state = None
            self.module_operational_state = None
            self.module_monitor_state = None
            self.module_reset_reason = None

            self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
            self.last_operational_state_change.parent = self
            self._children_name_map["last_operational_state_change"] = "last-operational-state-change"

            self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
            self.module_up_time.parent = self
            self._children_name_map["module_up_time"] = "module-up-time"

            self._segment_path = lambda: "fru-info"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)

        class LastOperationalStateChange(Entity):
            """
            Time operational state is last changed

            .. attribute:: time_in_seconds

                Time Value in Seconds
                **type**\: int
                **range:** \-2147483648..2147483647
                **units**\: second

            .. attribute:: time_in_nano_seconds

                Time Value in Nano\-seconds
                **type**\: int
                **range:** \-2147483648..2147483647
                **units**\: nanosecond
            """

            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Build the frozen timestamp container (seconds + nanoseconds)."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()

                self.yang_name = "last-operational-state-change"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "last-operational-state-change"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)

        class ModuleUpTime(Entity):
            """
            Module up time

            .. attribute:: time_in_seconds

                Time Value in Seconds
                **type**\: int
                **range:** \-2147483648..2147483647
                **units**\: second

            .. attribute:: time_in_nano_seconds

                Time Value in Nano\-seconds
                **type**\: int
                **range:** \-2147483648..2147483647
                **units**\: nanosecond
            """

            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Build the frozen up-time container (seconds + nanoseconds)."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()

                self.yang_name = "module-up-time"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "module-up-time"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Attributes(Entity):
    """
    Attributes

    .. attribute:: basic_info

        Entity attributes
        **type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.BasicInfo>`

    .. attribute:: fru_info

        Field Replaceable Unit (FRU) attributes
        **type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo>`
    """

    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Build the frozen 'attributes' container under a ports entry."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes, self).__init__()

        self.yang_name = "attributes"
        self.yang_parent_name = "ports"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # YANG child name -> (python attribute name, child class).
        self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo))])
        self._leafs = OrderedDict()

        # Both child containers are instantiated eagerly and linked back.
        self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.BasicInfo()
        self.basic_info.parent = self
        self._children_name_map["basic_info"] = "basic-info"

        self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo()
        self.fru_info.parent = self
        self._children_name_map["fru_info"] = "fru-info"

        self._segment_path = lambda: "attributes"
        # Assigned last; presumably freezes the instance -- see Entity base.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No leaf names here: this container holds only child containers.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes, [], name, value)

    class BasicInfo(Entity):
        """
        Entity attributes

        .. attribute:: name

            name string for the entity
            **type**\: str
            **length:** 0..255

        .. attribute:: description

            describes in user\-readable terms what the entity in question does
            **type**\: str
            **length:** 0..255

        .. attribute:: model_name

            model name
            **type**\: str
            **length:** 0..255

        .. attribute:: hardware_revision

            hw revision string
            **type**\: str
            **length:** 0..255

        .. attribute:: serial_number

            serial number
            **type**\: str
            **length:** 0..255

        .. attribute:: firmware_revision

            firmware revision string
            **type**\: str
            **length:** 0..255

        .. attribute:: software_revision

            software revision string
            **type**\: str
            **length:** 0..255

        .. attribute:: vendor_type

            maps to the vendor OID string
            **type**\: str
            **length:** 0..255

        .. attribute:: is_field_replaceable_unit

            1 if Field Replaceable Unit 0, if not
            **type**\: bool
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Build the frozen 'basic-info' leaf container (all leafs optional)."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.BasicInfo, self).__init__()

            self.yang_name = "basic-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # python attr -> (YLeaf(yang-type, yang-name), [python types]).
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('description', (YLeaf(YType.str, 'description'), ['str'])),
                ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
                ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
                ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
                ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
                ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
                ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
                ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
            ])
            self.name = None
            self.description = None
            self.model_name = None
            self.hardware_revision = None
            self.serial_number = None
            self.firmware_revision = None
            self.software_revision = None
            self.vendor_type = None
            self.is_field_replaceable_unit = None
            self._segment_path = lambda: "basic-info"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)

    class FruInfo(Entity):
        """
        Field Replaceable Unit (FRU) attributes

        .. attribute:: last_operational_state_change

            Time operational state is last changed
            **type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange>`

        .. attribute:: module_up_time

            Module up time
            **type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.ModuleUpTime>`

        .. attribute:: module_administrative_state

            Administrative state
            **type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`

        .. attribute:: module_power_administrative_state

            Power administrative state
            **type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`

        .. attribute:: module_operational_state

            Operation state
            **type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`

        .. attribute:: module_monitor_state

            Monitor state
            **type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`

        .. attribute:: module_reset_reason

            Reset reason
            **type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Build the frozen 'fru-info' container (enum leafs + two time children)."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo, self).__init__()

            self.yang_name = "fru-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.ModuleUpTime))])
            # Enumeration leafs resolve their enum class from (module, name, '').
            self._leafs = OrderedDict([
                ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
                ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
                ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
                ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
                ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
            ])
            self.module_administrative_state = None
            self.module_power_administrative_state = None
            self.module_operational_state = None
            self.module_monitor_state = None
            self.module_reset_reason = None

            self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange()
            self.last_operational_state_change.parent = self
            self._children_name_map["last_operational_state_change"] = "last-operational-state-change"

            self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.ModuleUpTime()
            self.module_up_time.parent = self
            self._children_name_map["module_up_time"] = "module-up-time"

            self._segment_path = lambda: "fru-info"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)

        class LastOperationalStateChange(Entity):
            """
            Time operational state is last changed

            .. attribute:: time_in_seconds

                Time Value in Seconds
                **type**\: int
                **range:** \-2147483648..2147483647
                **units**\: second

            .. attribute:: time_in_nano_seconds

                Time Value in Nano\-seconds
                **type**\: int
                **range:** \-2147483648..2147483647
                **units**\: nanosecond
            """

            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Build the frozen timestamp container (seconds + nanoseconds)."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange, self).__init__()

                self.yang_name = "last-operational-state-change"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "last-operational-state-change"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)

        class ModuleUpTime(Entity):
            """
            Module up time

            .. attribute:: time_in_seconds

                Time Value in Seconds
                **type**\: int
                **range:** \-2147483648..2147483647
                **units**\: second

            .. attribute:: time_in_nano_seconds

                Time Value in Nano\-seconds
                **type**\: int
                **range:** \-2147483648..2147483647
                **units**\: nanosecond
            """

            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Build the frozen up-time container (seconds + nanoseconds)."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.ModuleUpTime, self).__init__()

                self.yang_name = "module-up-time"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "module-up-time"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Sensors(Entity):
"""
Table of sensors
.. attribute:: sensor
Sensor number
**type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build the frozen 'sensors' table container (holds a YList of Sensor)."""
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors, self).__init__()

    self.yang_name = "sensors"
    self.yang_parent_name = "port-slot"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG list name -> (python attribute name, entry class).
    self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor))])
    self._leafs = OrderedDict()

    # Keyed list of sensor entries, managed by the YDK YList type.
    self.sensor = YList(self)
    self._segment_path = lambda: "sensors"
    self._is_frozen = True
def __setattr__(self, name, value):
    # No leaf names: this container only carries the 'sensor' list.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors, [], name, value)
class Sensor(Entity):
"""
Sensor number
.. attribute:: name (key)
Sensor name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build the frozen YANG 'sensor' list entry (list key: 'name')."""
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor, self).__init__()

    self.yang_name = "sensor"
    self.yang_parent_name = "sensors"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # 'name' is the sole YANG list key for this entry.
    self.ylist_key_names = ['name']
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None

    # Child container is instantiated eagerly and linked back to this parent.
    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    # Path segment embeds the list key, e.g. sensor[name='...'].
    self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route attribute writes through the Entity framework; 'name' is the
    # only YANG leaf on this entry.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor, ['name'], name, value)
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Build the frozen 'attributes' container under a sensor entry."""
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes, self).__init__()

    self.yang_name = "attributes"
    self.yang_parent_name = "sensor"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo))])
    self._leafs = OrderedDict()

    # Both child containers are instantiated eagerly and linked back.
    self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.BasicInfo()
    self.basic_info.parent = self
    self._children_name_map["basic_info"] = "basic-info"

    self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo()
    self.fru_info.parent = self
    self._children_name_map["fru_info"] = "fru-info"

    self._segment_path = lambda: "attributes"
    self._is_frozen = True
def __setattr__(self, name, value):
    # No leaf names here: this container holds only child containers.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes, [], name, value)
class BasicInfo(Entity):
    """
    Entity attributes

    .. attribute:: name

        name string for the entity
        **type**\: str
        **length:** 0..255

    .. attribute:: description

        describes in user\-readable terms what the entity in question does
        **type**\: str
        **length:** 0..255

    .. attribute:: model_name

        model name
        **type**\: str
        **length:** 0..255

    .. attribute:: hardware_revision

        hw revision string
        **type**\: str
        **length:** 0..255

    .. attribute:: serial_number

        serial number
        **type**\: str
        **length:** 0..255

    .. attribute:: firmware_revision

        firmware revision string
        **type**\: str
        **length:** 0..255

    .. attribute:: software_revision

        software revision string
        **type**\: str
        **length:** 0..255

    .. attribute:: vendor_type

        maps to the vendor OID string
        **type**\: str
        **length:** 0..255

    .. attribute:: is_field_replaceable_unit

        1 if Field Replaceable Unit 0, if not
        **type**\: bool
    """

    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Build the frozen 'basic-info' leaf container (all leafs optional)."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.BasicInfo, self).__init__()

        self.yang_name = "basic-info"
        self.yang_parent_name = "attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # python attr -> (YLeaf(yang-type, yang-name), [python types]).
        self._leafs = OrderedDict([
            ('name', (YLeaf(YType.str, 'name'), ['str'])),
            ('description', (YLeaf(YType.str, 'description'), ['str'])),
            ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
            ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
            ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
            ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
            ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
            ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
            ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
        ])
        self.name = None
        self.description = None
        self.model_name = None
        self.hardware_revision = None
        self.serial_number = None
        self.firmware_revision = None
        self.software_revision = None
        self.vendor_type = None
        self.is_field_replaceable_unit = None
        self._segment_path = lambda: "basic-info"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# Machine-generated (ydk-gen style) binding for the YANG container "fru-info"
# under .../sensor/attributes. The fully-qualified nested-class paths below are
# how the generator resolves sibling classes at import time — keep them intact.
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Maps each YANG child container name to (python attribute name, nested class).
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])
# Maps python attribute name -> (YLeaf descriptor, type spec); the enum leafs
# name (module, class, '') triples resolved by the ydk runtime.
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
# Leaf values start unset; the manager populates them from operational data.
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
# Eagerly instantiate and parent the two child containers.
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
# Relative path segment of this node within its parent's data tree.
self._segment_path = lambda: "fru-info"
# NOTE(review): set last; presumably arms the __setattr__ whitelist check in
# the Entity base — confirm against the ydk runtime.
self._is_frozen = True
# Route assignments through the runtime validator with this class's leaf list.
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
# Child container: timestamp (seconds + nanoseconds) of the last operational
# state change.
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Child container: module uptime (seconds + nanoseconds), same leaf shape as
# LastOperationalStateChange.
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Machine-generated binding for the "attributes" container of a port-slot
# (yang_parent_name = "port-slot"): aggregates BasicInfo (identity strings)
# and FruInfo (FRU state + uptime) child containers.
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes, self).__init__()
self.yang_name = "attributes"
self.yang_parent_name = "port-slot"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# YANG child container name -> (python attribute name, nested class).
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo))])
# This container has no leafs of its own, only child containers.
self._leafs = OrderedDict()
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
self._segment_path = lambda: "attributes"
# NOTE(review): set last; presumably arms the __setattr__ whitelist check in
# the Entity base — confirm against the ydk runtime.
self._is_frozen = True
# No settable leafs here, hence the empty whitelist.
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes, [], name, value)
# Child container: inventory identity strings (name, model, serial, ...).
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# Child container: FRU operational/administrative state enums plus the two
# timestamp child containers below.
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo.ModuleUpTime))])
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
self._segment_path = lambda: "fru-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
# Child container: timestamp of the last operational state change.
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Child container: module uptime timestamp.
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.PortSlots.PortSlot.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Machine-generated binding for the "sensors" list-container of a module
# (yang_parent_name = "module"). Holds a YList of keyed Sensor entries.
class Sensors(Entity):
"""
Table of sensors
.. attribute:: sensor
Sensor number
**type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors, self).__init__()
self.yang_name = "sensors"
self.yang_parent_name = "module"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor))])
self._leafs = OrderedDict()
# YANG list node: entries are appended to this YList.
self.sensor = YList(self)
self._segment_path = lambda: "sensors"
# NOTE(review): set last; presumably arms the __setattr__ whitelist check in
# the Entity base — confirm against the ydk runtime.
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors, [], name, value)
# List entry: one sensor, keyed by its "name" leaf.
class Sensor(Entity):
"""
Sensor number
.. attribute:: name (key)
Sensor name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor, self).__init__()
self.yang_name = "sensor"
self.yang_parent_name = "sensors"
self.is_top_level_class = False
self.has_list_ancestor = True
# "name" is the YANG list key for this entry.
self.ylist_key_names = ['name']
self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
])
self.name = None
self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes()
self.attributes.parent = self
self._children_name_map["attributes"] = "attributes"
# Path segment embeds the key value, e.g. sensor[name='temp0'].
self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor, ['name'], name, value)
# Child container aggregating BasicInfo and FruInfo for one sensor.
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes, self).__init__()
self.yang_name = "attributes"
self.yang_parent_name = "sensor"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo))])
self._leafs = OrderedDict()
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
self._segment_path = lambda: "attributes"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes, [], name, value)
# Child container: inventory identity strings for the sensor.
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# Child container: FRU state enums plus two timestamp child containers.
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
self._segment_path = lambda: "fru-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
# Child container: timestamp of the last operational state change.
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Child container: module uptime timestamp.
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo>`
"""
# YANG module prefix and revision this generated binding carries.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes, self).__init__()
# Identity and placement of this node in the YANG data tree.
self.yang_name = "attributes"
self.yang_parent_name = "module"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Pure container node: two child containers, no leafs of its own.
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo))])
self._leafs = OrderedDict()
# Instantiate child containers eagerly and register their YANG names.
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
# Relative path segment used when building this node's data path.
self._segment_path = lambda: "attributes"
# NOTE(review): set last; presumably locks further attribute creation —
# confirm against Entity framework internals.
self._is_frozen = True
def __setattr__(self, name, value):
# Route all attribute writes through the framework helper; empty leaf list
# because this container has no leafs of its own.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes, [], name, value)
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
# YANG module prefix and revision this generated binding carries.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.BasicInfo, self).__init__()
# Identity and placement of this node in the YANG data tree.
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Leaf-only node: eight string leafs plus one boolean, mapped as
# python-attribute -> (YLeaf descriptor, accepted python types).
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
# Leaf values start unset.
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
# Relative path segment used when building this node's data path.
self._segment_path = lambda: "basic-info"
# NOTE(review): set last; presumably locks further attribute creation —
# confirm against Entity framework internals.
self._is_frozen = True
def __setattr__(self, name, value):
# Route all attribute writes through the framework helper, scoped to this
# class's leaf names.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
# YANG module prefix and revision this generated binding carries.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo, self).__init__()
# Identity and placement of this node in the YANG data tree.
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Two child containers (timestamps) plus five enumeration leafs; each enum
# leaf names the (module, enum class) it resolves against.
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo.ModuleUpTime))])
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
# Leaf values start unset.
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
# Instantiate child containers eagerly and register their YANG names.
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
# Relative path segment used when building this node's data path.
self._segment_path = lambda: "fru-info"
# NOTE(review): set last; presumably locks further attribute creation —
# confirm against Entity framework internals.
self._is_frozen = True
def __setattr__(self, name, value):
# Route all attribute writes through the framework helper, scoped to this
# class's leaf names.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
# YANG module prefix and revision this generated binding carries.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
# Identity and placement of this node in the YANG data tree.
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# No child containers; two int32 leafs (seconds + nanoseconds).
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
# Leaf values start unset.
self.time_in_seconds = None
self.time_in_nano_seconds = None
# Relative path segment used when building this node's data path.
self._segment_path = lambda: "last-operational-state-change"
# NOTE(review): set last; presumably locks further attribute creation —
# confirm against Entity framework internals.
self._is_frozen = True
def __setattr__(self, name, value):
# Route all attribute writes through the framework helper, scoped to this
# class's two leaf names.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
# YANG module prefix and revision this generated binding carries.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo.ModuleUpTime, self).__init__()
# Identity and placement of this node in the YANG data tree.
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# No child containers; two int32 leafs (seconds + nanoseconds).
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
# Leaf values start unset.
self.time_in_seconds = None
self.time_in_nano_seconds = None
# Relative path segment used when building this node's data path.
self._segment_path = lambda: "module-up-time"
# NOTE(review): set last; presumably locks further attribute creation —
# confirm against Entity framework internals.
self._is_frozen = True
def __setattr__(self, name, value):
# Route all attribute writes through the framework helper, scoped to this
# class's two leaf names.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Module.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo>`
"""
# YANG module prefix and revision this generated binding carries.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes, self).__init__()
# Identity and placement of this node in the YANG data tree.
self.yang_name = "attributes"
self.yang_parent_name = "sub-slot"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Pure container node: two child containers, no leafs of its own.
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo))])
self._leafs = OrderedDict()
# Instantiate child containers eagerly and register their YANG names.
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
# Relative path segment used when building this node's data path.
self._segment_path = lambda: "attributes"
# NOTE(review): set last; presumably locks further attribute creation —
# confirm against Entity framework internals.
self._is_frozen = True
def __setattr__(self, name, value):
# Route all attribute writes through the framework helper; empty leaf list
# because this container has no leafs of its own.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes, [], name, value)
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
# YANG module prefix and revision this generated binding carries.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.BasicInfo, self).__init__()
# Identity and placement of this node in the YANG data tree.
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Leaf-only node: eight string leafs plus one boolean, mapped as
# python-attribute -> (YLeaf descriptor, accepted python types).
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
# Leaf values start unset.
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
# Relative path segment used when building this node's data path.
self._segment_path = lambda: "basic-info"
# NOTE(review): set last; presumably locks further attribute creation —
# confirm against Entity framework internals.
self._is_frozen = True
def __setattr__(self, name, value):
# Route all attribute writes through the framework helper, scoped to this
# class's leaf names.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
# YANG module prefix and revision this generated binding carries.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo, self).__init__()
# Identity and placement of this node in the YANG data tree.
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Two child containers (timestamps) plus five enumeration leafs; each enum
# leaf names the (module, enum class) it resolves against.
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo.ModuleUpTime))])
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
# Leaf values start unset.
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
# Instantiate child containers eagerly and register their YANG names.
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
# Relative path segment used when building this node's data path.
self._segment_path = lambda: "fru-info"
# NOTE(review): set last; presumably locks further attribute creation —
# confirm against Entity framework internals.
self._is_frozen = True
def __setattr__(self, name, value):
# Route all attribute writes through the framework helper, scoped to this
# class's leaf names.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
# YANG module prefix and revision this generated binding carries.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
# Identity and placement of this node in the YANG data tree.
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# No child containers; two int32 leafs (seconds + nanoseconds).
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
# Leaf values start unset.
self.time_in_seconds = None
self.time_in_nano_seconds = None
# Relative path segment used when building this node's data path.
self._segment_path = lambda: "last-operational-state-change"
# NOTE(review): set last; presumably locks further attribute creation —
# confirm against Entity framework internals.
self._is_frozen = True
def __setattr__(self, name, value):
# Route all attribute writes through the framework helper, scoped to this
# class's two leaf names.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
# YANG module prefix and revision this generated binding carries.
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo.ModuleUpTime, self).__init__()
# Identity and placement of this node in the YANG data tree.
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# No child containers; two int32 leafs (seconds + nanoseconds).
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
# Leaf values start unset.
self.time_in_seconds = None
self.time_in_nano_seconds = None
# Relative path segment used when building this node's data path.
self._segment_path = lambda: "module-up-time"
# NOTE(review): set last; presumably locks further attribute creation —
# confirm against Entity framework internals.
self._is_frozen = True
def __setattr__(self, name, value):
# Route all attribute writes through the framework helper, scoped to this
# class's two leaf names.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.SubSlots.SubSlot.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Portses(Entity):
"""
Table of port slots
.. attribute:: ports
Port number
**type**\: list of :py:class:`Ports <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses, self).__init__()
self.yang_name = "portses"
self.yang_parent_name = "card"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("ports", ("ports", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports))])
self._leafs = OrderedDict()
self.ports = YList(self)
self._segment_path = lambda: "portses"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses, [], name, value)
class Ports(Entity):
"""
Port number
.. attribute:: name (key)
Port name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: hw_components
Table of HW components
**type**\: :py:class:`HwComponents <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents>`
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports, self).__init__()
self.yang_name = "ports"
self.yang_parent_name = "portses"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['name']
self._child_classes = OrderedDict([("hw-components", ("hw_components", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents)), ("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
])
self.name = None
self.hw_components = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents()
self.hw_components.parent = self
self._children_name_map["hw_components"] = "hw-components"
self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors()
self.sensors.parent = self
self._children_name_map["sensors"] = "sensors"
self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes()
self.attributes.parent = self
self._children_name_map["attributes"] = "attributes"
self._segment_path = lambda: "ports" + "[name='" + str(self.name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports, ['name'], name, value)
class HwComponents(Entity):
"""
Table of HW components
.. attribute:: hw_component
HW component number
**type**\: list of :py:class:`HwComponent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents, self).__init__()
self.yang_name = "hw-components"
self.yang_parent_name = "ports"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("hw-component", ("hw_component", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent))])
self._leafs = OrderedDict()
self.hw_component = YList(self)
self._segment_path = lambda: "hw-components"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents, [], name, value)
class HwComponent(Entity):
"""
HW component number
.. attribute:: name (key)
HW component name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent, self).__init__()
self.yang_name = "hw-component"
self.yang_parent_name = "hw-components"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['name']
self._child_classes = OrderedDict([("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
])
self.name = None
self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors()
self.sensors.parent = self
self._children_name_map["sensors"] = "sensors"
self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes()
self.attributes.parent = self
self._children_name_map["attributes"] = "attributes"
self._segment_path = lambda: "hw-component" + "[name='" + str(self.name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent, ['name'], name, value)
class Sensors(Entity):
"""
Table of sensors
.. attribute:: sensor
Sensor number
**type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors, self).__init__()
self.yang_name = "sensors"
self.yang_parent_name = "hw-component"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor))])
self._leafs = OrderedDict()
self.sensor = YList(self)
self._segment_path = lambda: "sensors"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors, [], name, value)
class Sensor(Entity):
"""
Sensor number
.. attribute:: name (key)
Sensor name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor, self).__init__()
self.yang_name = "sensor"
self.yang_parent_name = "sensors"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['name']
self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
])
self.name = None
self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes()
self.attributes.parent = self
self._children_name_map["attributes"] = "attributes"
self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor, ['name'], name, value)
class Attributes(Entity):
    """
    Attributes
    .. attribute:: basic_info
    Entity attributes
    **type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo>`
    .. attribute:: fru_info
    Field Replaceable Unit (FRU) attributes
    **type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo>`
    """
    # Auto-generated YDK binding for the 'attributes' container of a sensor.
    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Initialize the 'attributes' container and its two fixed children."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes, self).__init__()
        self.yang_name = "attributes"
        self.yang_parent_name = "sensor"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo))])
        self._leafs = OrderedDict()
        # Pre-create both singleton child containers and link them back.
        self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo()
        self.basic_info.parent = self
        self._children_name_map["basic_info"] = "basic-info"
        self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo()
        self.fru_info.parent = self
        self._children_name_map["fru_info"] = "fru-info"
        self._segment_path = lambda: "attributes"
        # Must be set last: freezing restricts further attribute assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No settable leafs on this container; only known members may change.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes, [], name, value)

    class BasicInfo(Entity):
        """
        Entity attributes
        .. attribute:: name
        name string for the entity
        **type**\: str
        **length:** 0..255
        .. attribute:: description
        describes in user\-readable terms what the entity in question does
        **type**\: str
        **length:** 0..255
        .. attribute:: model_name
        model name
        **type**\: str
        **length:** 0..255
        .. attribute:: hardware_revision
        hw revision string
        **type**\: str
        **length:** 0..255
        .. attribute:: serial_number
        serial number
        **type**\: str
        **length:** 0..255
        .. attribute:: firmware_revision
        firmware revision string
        **type**\: str
        **length:** 0..255
        .. attribute:: software_revision
        software revision string
        **type**\: str
        **length:** 0..255
        .. attribute:: vendor_type
        maps to the vendor OID string
        **type**\: str
        **length:** 0..255
        .. attribute:: is_field_replaceable_unit
        1 if Field Replaceable Unit 0, if not
        **type**\: bool
        """
        # Leaf-only container: inventory identity strings plus the FRU flag.
        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Initialize the 'basic-info' container (leafs only, no children)."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo, self).__init__()
            self.yang_name = "basic-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('description', (YLeaf(YType.str, 'description'), ['str'])),
                ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
                ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
                ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
                ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
                ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
                ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
                ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
            ])
            # All leaf values start unset.
            self.name = None
            self.description = None
            self.model_name = None
            self.hardware_revision = None
            self.serial_number = None
            self.firmware_revision = None
            self.software_revision = None
            self.vendor_type = None
            self.is_field_replaceable_unit = None
            self._segment_path = lambda: "basic-info"
            # Must be set last: freezing restricts further attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Only the declared leafs above may be assigned after freezing.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)

    class FruInfo(Entity):
        """
        Field Replaceable Unit (FRU) attributes
        .. attribute:: last_operational_state_change
        Time operational state is last changed
        **type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`
        .. attribute:: module_up_time
        Module up time
        **type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`
        .. attribute:: module_administrative_state
        Administrative state
        **type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
        .. attribute:: module_power_administrative_state
        Power administrative state
        **type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
        .. attribute:: module_operational_state
        Operation state
        **type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
        .. attribute:: module_monitor_state
        Monitor state
        **type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
        .. attribute:: module_reset_reason
        Reset reason
        **type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
        """
        # FRU state: enum leafs plus two timestamp child containers.
        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Initialize the 'fru-info' container with enum leafs and time children."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo, self).__init__()
            self.yang_name = "fru-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])
            # Enum leafs resolve against identities declared at module level.
            self._leafs = OrderedDict([
                ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
                ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
                ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
                ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
                ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
            ])
            self.module_administrative_state = None
            self.module_power_administrative_state = None
            self.module_operational_state = None
            self.module_monitor_state = None
            self.module_reset_reason = None
            # Pre-create both singleton timestamp children and link them back.
            self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
            self.last_operational_state_change.parent = self
            self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
            self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
            self.module_up_time.parent = self
            self._children_name_map["module_up_time"] = "module-up-time"
            self._segment_path = lambda: "fru-info"
            # Must be set last: freezing restricts further attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Only the declared enum leafs may be assigned after freezing.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)

        class LastOperationalStateChange(Entity):
            """
            Time operational state is last changed
            .. attribute:: time_in_seconds
            Time Value in Seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: second
            .. attribute:: time_in_nano_seconds
            Time Value in Nano\-seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: nanosecond
            """
            # Timestamp container: seconds + nanoseconds int32 leafs.
            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Initialize the 'last-operational-state-change' timestamp container."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
                self.yang_name = "last-operational-state-change"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "last-operational-state-change"
                # Must be set last: freezing restricts further attribute assignment.
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Only the two time leafs may be assigned after freezing.
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)

        class ModuleUpTime(Entity):
            """
            Module up time
            .. attribute:: time_in_seconds
            Time Value in Seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: second
            .. attribute:: time_in_nano_seconds
            Time Value in Nano\-seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: nanosecond
            """
            # Timestamp container: seconds + nanoseconds int32 leafs.
            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Initialize the 'module-up-time' timestamp container."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()
                self.yang_name = "module-up-time"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "module-up-time"
                # Must be set last: freezing restricts further attribute assignment.
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Only the two time leafs may be assigned after freezing.
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Attributes(Entity):
    """
    Attributes
    .. attribute:: basic_info
    Entity attributes
    **type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo>`
    .. attribute:: fru_info
    Field Replaceable Unit (FRU) attributes
    **type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo>`
    """
    # Auto-generated YDK binding for the 'attributes' container of a hw-component.
    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Initialize the 'attributes' container and its two fixed children."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes, self).__init__()
        self.yang_name = "attributes"
        self.yang_parent_name = "hw-component"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo))])
        self._leafs = OrderedDict()
        # Pre-create both singleton child containers and link them back.
        self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo()
        self.basic_info.parent = self
        self._children_name_map["basic_info"] = "basic-info"
        self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo()
        self.fru_info.parent = self
        self._children_name_map["fru_info"] = "fru-info"
        self._segment_path = lambda: "attributes"
        # Must be set last: freezing restricts further attribute assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No settable leafs on this container; only known members may change.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes, [], name, value)

    class BasicInfo(Entity):
        """
        Entity attributes
        .. attribute:: name
        name string for the entity
        **type**\: str
        **length:** 0..255
        .. attribute:: description
        describes in user\-readable terms what the entity in question does
        **type**\: str
        **length:** 0..255
        .. attribute:: model_name
        model name
        **type**\: str
        **length:** 0..255
        .. attribute:: hardware_revision
        hw revision string
        **type**\: str
        **length:** 0..255
        .. attribute:: serial_number
        serial number
        **type**\: str
        **length:** 0..255
        .. attribute:: firmware_revision
        firmware revision string
        **type**\: str
        **length:** 0..255
        .. attribute:: software_revision
        software revision string
        **type**\: str
        **length:** 0..255
        .. attribute:: vendor_type
        maps to the vendor OID string
        **type**\: str
        **length:** 0..255
        .. attribute:: is_field_replaceable_unit
        1 if Field Replaceable Unit 0, if not
        **type**\: bool
        """
        # Leaf-only container: inventory identity strings plus the FRU flag.
        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Initialize the 'basic-info' container (leafs only, no children)."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo, self).__init__()
            self.yang_name = "basic-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('description', (YLeaf(YType.str, 'description'), ['str'])),
                ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
                ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
                ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
                ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
                ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
                ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
                ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
            ])
            # All leaf values start unset.
            self.name = None
            self.description = None
            self.model_name = None
            self.hardware_revision = None
            self.serial_number = None
            self.firmware_revision = None
            self.software_revision = None
            self.vendor_type = None
            self.is_field_replaceable_unit = None
            self._segment_path = lambda: "basic-info"
            # Must be set last: freezing restricts further attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Only the declared leafs above may be assigned after freezing.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)

    class FruInfo(Entity):
        """
        Field Replaceable Unit (FRU) attributes
        .. attribute:: last_operational_state_change
        Time operational state is last changed
        **type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange>`
        .. attribute:: module_up_time
        Module up time
        **type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime>`
        .. attribute:: module_administrative_state
        Administrative state
        **type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
        .. attribute:: module_power_administrative_state
        Power administrative state
        **type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
        .. attribute:: module_operational_state
        Operation state
        **type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
        .. attribute:: module_monitor_state
        Monitor state
        **type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
        .. attribute:: module_reset_reason
        Reset reason
        **type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
        """
        # FRU state: enum leafs plus two timestamp child containers.
        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Initialize the 'fru-info' container with enum leafs and time children."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo, self).__init__()
            self.yang_name = "fru-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime))])
            # Enum leafs resolve against identities declared at module level.
            self._leafs = OrderedDict([
                ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
                ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
                ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
                ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
                ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
            ])
            self.module_administrative_state = None
            self.module_power_administrative_state = None
            self.module_operational_state = None
            self.module_monitor_state = None
            self.module_reset_reason = None
            # Pre-create both singleton timestamp children and link them back.
            self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange()
            self.last_operational_state_change.parent = self
            self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
            self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime()
            self.module_up_time.parent = self
            self._children_name_map["module_up_time"] = "module-up-time"
            self._segment_path = lambda: "fru-info"
            # Must be set last: freezing restricts further attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Only the declared enum leafs may be assigned after freezing.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)

        class LastOperationalStateChange(Entity):
            """
            Time operational state is last changed
            .. attribute:: time_in_seconds
            Time Value in Seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: second
            .. attribute:: time_in_nano_seconds
            Time Value in Nano\-seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: nanosecond
            """
            # Timestamp container: seconds + nanoseconds int32 leafs.
            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Initialize the 'last-operational-state-change' timestamp container."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
                self.yang_name = "last-operational-state-change"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "last-operational-state-change"
                # Must be set last: freezing restricts further attribute assignment.
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Only the two time leafs may be assigned after freezing.
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)

        class ModuleUpTime(Entity):
            """
            Module up time
            .. attribute:: time_in_seconds
            Time Value in Seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: second
            .. attribute:: time_in_nano_seconds
            Time Value in Nano\-seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: nanosecond
            """
            # Timestamp container: seconds + nanoseconds int32 leafs.
            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Initialize the 'module-up-time' timestamp container."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime, self).__init__()
                self.yang_name = "module-up-time"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "module-up-time"
                # Must be set last: freezing restricts further attribute assignment.
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Only the two time leafs may be assigned after freezing.
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Sensors(Entity):
"""
Table of sensors
.. attribute:: sensor
Sensor number
**type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Initialize the 'sensors' container holding the keyed sensor list."""
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors, self).__init__()
    self.yang_name = "sensors"
    self.yang_parent_name = "ports"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor))])
    self._leafs = OrderedDict()
    # YList manages the keyed 'sensor' list entries.
    self.sensor = YList(self)
    self._segment_path = lambda: "sensors"
    # Must be set last: freezing restricts further attribute assignment.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No settable leafs on this container; only known members may change.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors, [], name, value)
class Sensor(Entity):
"""
Sensor number
.. attribute:: name (key)
Sensor name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Initialize this sensor list entry (YANG list 'sensor', keyed by 'name')."""
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor, self).__init__()
    self.yang_name = "sensor"
    self.yang_parent_name = "sensors"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # 'name' is the list key for this entry.
    self.ylist_key_names = ['name']
    self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None
    # Pre-create the singleton 'attributes' child container and link it back.
    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    # Path segment carries the key predicate, e.g. sensor[name='...'].
    self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
    # Must be set last: freezing restricts further attribute assignment.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate to YDK's validator; 'name' is the only settable leaf here.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor, ['name'], name, value)
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Initialize the 'attributes' container and its two fixed children."""
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes, self).__init__()
    self.yang_name = "attributes"
    self.yang_parent_name = "sensor"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo))])
    self._leafs = OrderedDict()
    # Pre-create both singleton child containers and link them back.
    self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo()
    self.basic_info.parent = self
    self._children_name_map["basic_info"] = "basic-info"
    self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo()
    self.fru_info.parent = self
    self._children_name_map["fru_info"] = "fru-info"
    self._segment_path = lambda: "attributes"
    # Must be set last: freezing restricts further attribute assignment.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No settable leafs on this container; only known members may change.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes, [], name, value)
class BasicInfo(Entity):
    """
    Entity attributes
    .. attribute:: name
    name string for the entity
    **type**\: str
    **length:** 0..255
    .. attribute:: description
    describes in user\-readable terms what the entity in question does
    **type**\: str
    **length:** 0..255
    .. attribute:: model_name
    model name
    **type**\: str
    **length:** 0..255
    .. attribute:: hardware_revision
    hw revision string
    **type**\: str
    **length:** 0..255
    .. attribute:: serial_number
    serial number
    **type**\: str
    **length:** 0..255
    .. attribute:: firmware_revision
    firmware revision string
    **type**\: str
    **length:** 0..255
    .. attribute:: software_revision
    software revision string
    **type**\: str
    **length:** 0..255
    .. attribute:: vendor_type
    maps to the vendor OID string
    **type**\: str
    **length:** 0..255
    .. attribute:: is_field_replaceable_unit
    1 if Field Replaceable Unit 0, if not
    **type**\: bool
    """
    # Leaf-only container: inventory identity strings plus the FRU flag.
    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Initialize the 'basic-info' container (leafs only, no children)."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo, self).__init__()
        self.yang_name = "basic-info"
        self.yang_parent_name = "attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('name', (YLeaf(YType.str, 'name'), ['str'])),
            ('description', (YLeaf(YType.str, 'description'), ['str'])),
            ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
            ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
            ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
            ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
            ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
            ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
            ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
        ])
        # All leaf values start unset.
        self.name = None
        self.description = None
        self.model_name = None
        self.hardware_revision = None
        self.serial_number = None
        self.firmware_revision = None
        self.software_revision = None
        self.vendor_type = None
        self.is_field_replaceable_unit = None
        self._segment_path = lambda: "basic-info"
        # Must be set last: freezing restricts further attribute assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Only the declared leafs above may be assigned after freezing.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# NOTE(review): auto-generated YDK binding (from the Cisco-IOS-XR-plat-chas-invmgr-ng-oper
# YANG model, revision 2018-01-22). Do not hand-edit logic; regenerate from the model.
class FruInfo(Entity):
    """
    Field Replaceable Unit (FRU) attributes

    .. attribute:: last_operational_state_change

    	Time operational state is last changed
    	**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`

    .. attribute:: module_up_time

    	Module up time
    	**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`

    .. attribute:: module_administrative_state

    	Administrative state
    	**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`

    .. attribute:: module_power_administrative_state

    	Power administrative state
    	**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`

    .. attribute:: module_operational_state

    	Operation state
    	**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`

    .. attribute:: module_monitor_state

    	Monitor state
    	**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`

    .. attribute:: module_reset_reason

    	Reset reason
    	**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`

    """

    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        # Register this container's YANG metadata with the YDK runtime.
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo, self).__init__()

        self.yang_name = "fru-info"
        self.yang_parent_name = "attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # YANG child-container name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])
        # Leaf metadata: python name -> (YLeaf descriptor, accepted type spec).
        self._leafs = OrderedDict([
            ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
            ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
            ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
            ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
            ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
        ])
        # Leaf values start unset; populated when data is read from the device.
        self.module_administrative_state = None
        self.module_power_administrative_state = None
        self.module_operational_state = None
        self.module_monitor_state = None
        self.module_reset_reason = None

        # Child containers are instantiated eagerly and re-parented to self.
        self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
        self.last_operational_state_change.parent = self
        self._children_name_map["last_operational_state_change"] = "last-operational-state-change"

        self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
        self.module_up_time.parent = self
        self._children_name_map["module_up_time"] = "module-up-time"
        self._segment_path = lambda: "fru-info"
        # Freeze the instance: all later writes go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Frozen entity: delegate writes to YDK's validating setter, listing
        # the leafs that callers are allowed to assign.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)


    class LastOperationalStateChange(Entity):
        """
        Time operational state is last changed

        .. attribute:: time_in_seconds

        	Time Value in Seconds
        	**type**\: int
        	**range:** \-2147483648..2147483647
        	**units**\: second

        .. attribute:: time_in_nano_seconds

        	Time Value in Nano\-seconds
        	**type**\: int
        	**range:** \-2147483648..2147483647
        	**units**\: nanosecond

        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            # Leaf-only container: two int32 leafs, no children.
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()

            self.yang_name = "last-operational-state-change"
            self.yang_parent_name = "fru-info"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
            ])
            self.time_in_seconds = None
            self.time_in_nano_seconds = None
            self._segment_path = lambda: "last-operational-state-change"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Frozen entity: route writes through YDK validation.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)


    class ModuleUpTime(Entity):
        """
        Module up time

        .. attribute:: time_in_seconds

        	Time Value in Seconds
        	**type**\: int
        	**range:** \-2147483648..2147483647
        	**units**\: second

        .. attribute:: time_in_nano_seconds

        	Time Value in Nano\-seconds
        	**type**\: int
        	**range:** \-2147483648..2147483647
        	**units**\: nanosecond

        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            # Leaf-only container: two int32 leafs, no children.
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()

            self.yang_name = "module-up-time"
            self.yang_parent_name = "fru-info"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
            ])
            self.time_in_seconds = None
            self.time_in_nano_seconds = None
            self._segment_path = lambda: "module-up-time"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Frozen entity: route writes through YDK validation.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# NOTE(review): auto-generated YDK binding for the port-level "attributes"
# container (parent: ports). Do not hand-edit logic; regenerate from the model.
class Attributes(Entity):
    """
    Attributes

    .. attribute:: basic_info

    	Entity attributes
    	**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.BasicInfo>`

    .. attribute:: fru_info

    	Field Replaceable Unit (FRU) attributes
    	**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo>`

    """

    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        # Pure container: no leafs of its own, two eagerly-built children.
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes, self).__init__()

        self.yang_name = "attributes"
        self.yang_parent_name = "ports"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # YANG child-container name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo))])
        self._leafs = OrderedDict()

        self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.BasicInfo()
        self.basic_info.parent = self
        self._children_name_map["basic_info"] = "basic-info"

        self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo()
        self.fru_info.parent = self
        self._children_name_map["fru_info"] = "fru-info"
        self._segment_path = lambda: "attributes"
        # Freeze the instance: all later writes go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Frozen entity with no writable leafs (empty leaf-name list).
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes, [], name, value)


    class BasicInfo(Entity):
        """
        Entity attributes

        .. attribute:: name

        	name string for the entity
        	**type**\: str
        	**length:** 0..255

        .. attribute:: description

        	describes in user\-readable terms what the entity in question does
        	**type**\: str
        	**length:** 0..255

        .. attribute:: model_name

        	model name
        	**type**\: str
        	**length:** 0..255

        .. attribute:: hardware_revision

        	hw revision string
        	**type**\: str
        	**length:** 0..255

        .. attribute:: serial_number

        	serial number
        	**type**\: str
        	**length:** 0..255

        .. attribute:: firmware_revision

        	firmware revision string
        	**type**\: str
        	**length:** 0..255

        .. attribute:: software_revision

        	software revision string
        	**type**\: str
        	**length:** 0..255

        .. attribute:: vendor_type

        	maps to the vendor OID string
        	**type**\: str
        	**length:** 0..255

        .. attribute:: is_field_replaceable_unit

        	1 if Field Replaceable Unit, 0 if not
        	**type**\: bool

        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            # Leaf-only container holding inventory identification strings.
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.BasicInfo, self).__init__()

            self.yang_name = "basic-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Leaf metadata: python name -> (YLeaf descriptor, accepted type spec).
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('description', (YLeaf(YType.str, 'description'), ['str'])),
                ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
                ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
                ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
                ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
                ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
                ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
                ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
            ])
            self.name = None
            self.description = None
            self.model_name = None
            self.hardware_revision = None
            self.serial_number = None
            self.firmware_revision = None
            self.software_revision = None
            self.vendor_type = None
            self.is_field_replaceable_unit = None
            self._segment_path = lambda: "basic-info"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Frozen entity: route writes through YDK validation.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)


    class FruInfo(Entity):
        """
        Field Replaceable Unit (FRU) attributes

        .. attribute:: last_operational_state_change

        	Time operational state is last changed
        	**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange>`

        .. attribute:: module_up_time

        	Module up time
        	**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo.ModuleUpTime>`

        .. attribute:: module_administrative_state

        	Administrative state
        	**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`

        .. attribute:: module_power_administrative_state

        	Power administrative state
        	**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`

        .. attribute:: module_operational_state

        	Operation state
        	**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`

        .. attribute:: module_monitor_state

        	Monitor state
        	**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`

        .. attribute:: module_reset_reason

        	Reset reason
        	**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`

        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            # FRU state container: five enum leafs plus two timestamp children.
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo, self).__init__()

            self.yang_name = "fru-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # YANG child-container name -> (python attribute name, binding class).
            self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo.ModuleUpTime))])
            # Leaf metadata: python name -> (YLeaf descriptor, accepted type spec).
            self._leafs = OrderedDict([
                ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
                ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
                ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
                ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
                ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
            ])
            self.module_administrative_state = None
            self.module_power_administrative_state = None
            self.module_operational_state = None
            self.module_monitor_state = None
            self.module_reset_reason = None

            # Child containers are instantiated eagerly and re-parented to self.
            self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange()
            self.last_operational_state_change.parent = self
            self._children_name_map["last_operational_state_change"] = "last-operational-state-change"

            self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo.ModuleUpTime()
            self.module_up_time.parent = self
            self._children_name_map["module_up_time"] = "module-up-time"
            self._segment_path = lambda: "fru-info"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Frozen entity: route writes through YDK validation.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)


        class LastOperationalStateChange(Entity):
            """
            Time operational state is last changed

            .. attribute:: time_in_seconds

            	Time Value in Seconds
            	**type**\: int
            	**range:** \-2147483648..2147483647
            	**units**\: second

            .. attribute:: time_in_nano_seconds

            	Time Value in Nano\-seconds
            	**type**\: int
            	**range:** \-2147483648..2147483647
            	**units**\: nanosecond

            """

            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                # Leaf-only container: two int32 leafs, no children.
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange, self).__init__()

                self.yang_name = "last-operational-state-change"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "last-operational-state-change"
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Frozen entity: route writes through YDK validation.
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)


        class ModuleUpTime(Entity):
            """
            Module up time

            .. attribute:: time_in_seconds

            	Time Value in Seconds
            	**type**\: int
            	**range:** \-2147483648..2147483647
            	**units**\: second

            .. attribute:: time_in_nano_seconds

            	Time Value in Nano\-seconds
            	**type**\: int
            	**range:** \-2147483648..2147483647
            	**units**\: nanosecond

            """

            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                # Leaf-only container: two int32 leafs, no children.
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo.ModuleUpTime, self).__init__()

                self.yang_name = "module-up-time"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "module-up-time"
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Frozen entity: route writes through YDK validation.
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Portses.Ports.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class PortSlots(Entity):
"""
Table of port slots
.. attribute:: port_slot
Port slot number
**type**\: list of :py:class:`PortSlot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Table container "port-slots": holds only the keyed list of port-slot entries.
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots, self).__init__()

    self.yang_name = "port-slots"
    self.yang_parent_name = "card"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Single child list class mapped from its YANG name.
    self._child_classes = OrderedDict([("port-slot", ("port_slot", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot))])
    self._leafs = OrderedDict()
    # YList of PortSlot entries, parented to this container.
    self.port_slot = YList(self)
    self._segment_path = lambda: "port-slots"
    # Freeze: later attribute writes are routed through _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Frozen entity with no writable leafs (empty leaf-name list).
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots, [], name, value)
class PortSlot(Entity):
"""
Port slot number
.. attribute:: name (key)
Port slot name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: portses
Table of port slots
**type**\: :py:class:`Portses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses>`
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # List entry "port-slot", keyed by the 'name' leaf; has three child containers.
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot, self).__init__()

    self.yang_name = "port-slot"
    self.yang_parent_name = "port-slots"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']
    # YANG child-container name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("portses", ("portses", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses)), ("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None

    # Child containers are instantiated eagerly and re-parented to self.
    self.portses = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses()
    self.portses.parent = self
    self._children_name_map["portses"] = "portses"

    self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors()
    self.sensors.parent = self
    self._children_name_map["sensors"] = "sensors"

    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    # Keyed segment path embeds the list key into the XPath-like segment.
    self._segment_path = lambda: "port-slot" + "[name='" + str(self.name) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Frozen entity: only the 'name' key leaf is writable.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot, ['name'], name, value)
class Portses(Entity):
"""
Table of port slots
.. attribute:: ports
Port number
**type**\: list of :py:class:`Ports <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Table container "portses": holds only the keyed list of ports entries.
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses, self).__init__()

    self.yang_name = "portses"
    self.yang_parent_name = "port-slot"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("ports", ("ports", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports))])
    self._leafs = OrderedDict()
    # YList of Ports entries, parented to this container.
    self.ports = YList(self)
    self._segment_path = lambda: "portses"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Frozen entity with no writable leafs (empty leaf-name list).
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses, [], name, value)
class Ports(Entity):
"""
Port number
.. attribute:: name (key)
Port name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: hw_components
Table of HW components
**type**\: :py:class:`HwComponents <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents>`
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # List entry "ports", keyed by the 'name' leaf; has three child containers.
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports, self).__init__()

    self.yang_name = "ports"
    self.yang_parent_name = "portses"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']
    # YANG child-container name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("hw-components", ("hw_components", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents)), ("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None

    # Child containers are instantiated eagerly and re-parented to self.
    self.hw_components = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents()
    self.hw_components.parent = self
    self._children_name_map["hw_components"] = "hw-components"

    self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors()
    self.sensors.parent = self
    self._children_name_map["sensors"] = "sensors"

    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    # Keyed segment path embeds the list key into the XPath-like segment.
    self._segment_path = lambda: "ports" + "[name='" + str(self.name) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Frozen entity: only the 'name' key leaf is writable.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports, ['name'], name, value)
class HwComponents(Entity):
"""
Table of HW components
.. attribute:: hw_component
HW component number
**type**\: list of :py:class:`HwComponent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Table container "hw-components": holds only the keyed list of hw-component entries.
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents, self).__init__()

    self.yang_name = "hw-components"
    self.yang_parent_name = "ports"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("hw-component", ("hw_component", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent))])
    self._leafs = OrderedDict()
    # YList of HwComponent entries, parented to this container.
    self.hw_component = YList(self)
    self._segment_path = lambda: "hw-components"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Frozen entity with no writable leafs (empty leaf-name list).
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents, [], name, value)
class HwComponent(Entity):
"""
HW component number
.. attribute:: name (key)
HW component name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # List entry "hw-component", keyed by the 'name' leaf; has two child containers.
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent, self).__init__()

    self.yang_name = "hw-component"
    self.yang_parent_name = "hw-components"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']
    # YANG child-container name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None

    # Child containers are instantiated eagerly and re-parented to self.
    self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors()
    self.sensors.parent = self
    self._children_name_map["sensors"] = "sensors"

    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    # Keyed segment path embeds the list key into the XPath-like segment.
    self._segment_path = lambda: "hw-component" + "[name='" + str(self.name) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Frozen entity: only the 'name' key leaf is writable.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent, ['name'], name, value)
class Sensors(Entity):
"""
Table of sensors
.. attribute:: sensor
Sensor number
**type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Table container "sensors": holds only the keyed list of sensor entries.
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors, self).__init__()

    self.yang_name = "sensors"
    self.yang_parent_name = "hw-component"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor))])
    self._leafs = OrderedDict()
    # YList of Sensor entries, parented to this container.
    self.sensor = YList(self)
    self._segment_path = lambda: "sensors"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Frozen entity with no writable leafs (empty leaf-name list).
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors, [], name, value)
class Sensor(Entity):
"""
Sensor number
.. attribute:: name (key)
Sensor name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # List entry "sensor", keyed by the 'name' leaf; one child container.
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor, self).__init__()

    self.yang_name = "sensor"
    self.yang_parent_name = "sensors"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']
    self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None

    # Attributes child is instantiated eagerly and re-parented to self.
    self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes()
    self.attributes.parent = self
    self._children_name_map["attributes"] = "attributes"
    # Keyed segment path embeds the list key into the XPath-like segment.
    self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Frozen entity: only the 'name' key leaf is writable.
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor, ['name'], name, value)
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    # Pure container "attributes": no leafs, two eagerly-built children.
    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes, self).__init__()

    self.yang_name = "attributes"
    self.yang_parent_name = "sensor"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child-container name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo))])
    self._leafs = OrderedDict()

    self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo()
    self.basic_info.parent = self
    self._children_name_map["basic_info"] = "basic-info"

    self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo()
    self.fru_info.parent = self
    self._children_name_map["fru_info"] = "fru-info"
    self._segment_path = lambda: "attributes"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Frozen entity with no writable leafs (empty leaf-name list).
    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes, [], name, value)
# Leaf-only container "basic-info": inventory identity strings for the entity.
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# No child containers; this node carries leafs only.
self._child_classes = OrderedDict([])
# Leaf registry: python name -> (YLeaf with YANG name/type, python type list).
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
# All leafs start unset.
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
# Freeze last so subsequent writes are validated by __setattr__.
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# Container "fru-info": FRU state leafs plus two timestamp child containers.
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child containers: the two timestamp nodes declared below.
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])
# Enumeration leafs resolved from the module's Inv* enum classes.
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
self._segment_path = lambda: "fru-info"
# Freeze last: later writes go through __setattr__ validation.
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
# Timestamp container: seconds + nanoseconds of the last operational-state change.
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Timestamp container: seconds + nanoseconds of module uptime.
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Container "attributes" under "hw-component": groups the basic-info and
# fru-info child containers for a hardware component.
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes, self).__init__()
self.yang_name = "attributes"
self.yang_parent_name = "hw-component"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Map YANG child names to (python attribute name, binding class).
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo))])
# No leafs directly on this container.
self._leafs = OrderedDict()
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
self._segment_path = lambda: "attributes"
# Freeze last so later writes go through __setattr__ validation.
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes, [], name, value)
# Leaf-only container "basic-info": inventory identity strings for the entity.
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
# Leaf registry: python name -> (YLeaf with YANG name/type, python type list).
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# Container "fru-info": FRU state leafs plus two timestamp child containers.
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime))])
# Enumeration leafs resolved from the module's Inv* enum classes.
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
self._segment_path = lambda: "fru-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
# Timestamp container: seconds + nanoseconds of the last operational-state change.
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Timestamp container: seconds + nanoseconds of module uptime.
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Sensors(Entity):
"""
Table of sensors
.. attribute:: sensor
Sensor number
**type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
# Build the "sensors" table container under "ports"; its single child is the
# YList of "sensor" entries.
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors, self).__init__()
self.yang_name = "sensors"
self.yang_parent_name = "ports"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor))])
self._leafs = OrderedDict()
# Dynamic list of Sensor entries.
self.sensor = YList(self)
self._segment_path = lambda: "sensors"
# Freeze last so later writes go through __setattr__ validation.
self._is_frozen = True
# Route attribute writes through YDK validation; no leaf names on this container.
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors, [], name, value)
class Sensor(Entity):
"""
Sensor number
.. attribute:: name (key)
Sensor name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
# Build one "sensor" list entry, keyed by its "name" leaf; the key value is
# embedded in the node's segment path.
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor, self).__init__()
self.yang_name = "sensor"
self.yang_parent_name = "sensors"
self.is_top_level_class = False
self.has_list_ancestor = True
# YANG list key: the "name" leaf below.
self.ylist_key_names = ['name']
self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
])
self.name = None
self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes()
self.attributes.parent = self
self._children_name_map["attributes"] = "attributes"
# Segment path includes the key predicate, e.g. sensor[name='...'].
self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
self._is_frozen = True
# Route attribute writes through YDK validation; 'name' is the only leaf here.
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor, ['name'], name, value)
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
# Build the "attributes" container under a Ports-level "sensor" entry:
# registers the basic-info and fru-info child containers.
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes, self).__init__()
self.yang_name = "attributes"
self.yang_parent_name = "sensor"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Map YANG child names to (python attribute name, binding class).
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo))])
# No leafs directly on this container.
self._leafs = OrderedDict()
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
self._segment_path = lambda: "attributes"
# Freeze last: later writes go through __setattr__ validation.
self._is_frozen = True
# Route attribute writes through YDK validation; no leaf names on this container.
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes, [], name, value)
# Leaf-only container "basic-info": inventory identity strings for the entity.
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# No child containers; this node carries leafs only.
self._child_classes = OrderedDict([])
# Leaf registry: python name -> (YLeaf with YANG name/type, python type list).
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
# All leafs start unset.
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
# Freeze last so subsequent writes are validated by __setattr__.
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
class FruInfo(Entity):
    """
    Field Replaceable Unit (FRU) attributes

    .. attribute:: last_operational_state_change

        Time operational state is last changed

        **type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`

    .. attribute:: module_up_time

        Module up time

        **type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`

    .. attribute:: module_administrative_state

        Administrative state

        **type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`

    .. attribute:: module_power_administrative_state

        Power administrative state

        **type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`

    .. attribute:: module_operational_state

        Operation state

        **type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`

    .. attribute:: module_monitor_state

        Monitor state

        **type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`

    .. attribute:: module_reset_reason

        Reset reason

        **type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Build the fru-info container: five enum leafs plus two eagerly created child containers."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "fru-info"
        self.yang_parent_name = "attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # this node is a container, not a keyed list

        # YANG child-container name -> (python attribute name, generated class).
        self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])

        # Leaf attribute -> (YLeaf descriptor, accepted type spec); all five
        # leafs here are enumerations declared in this same module.
        self._leafs = OrderedDict([
            ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
            ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
            ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
            ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
            ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
        ])

        # Leaf values start unset (None) until populated by a read or the caller.
        self.module_administrative_state = None
        self.module_power_administrative_state = None
        self.module_operational_state = None
        self.module_monitor_state = None
        self.module_reset_reason = None

        # Child containers are instantiated eagerly and parented to this node.
        self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
        self.last_operational_state_change.parent = self
        self._children_name_map["last_operational_state_change"] = "last-operational-state-change"

        self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
        self.module_up_time.parent = self
        self._children_name_map["module_up_time"] = "module-up-time"

        # Relative XPath segment of this node; freeze so no new attributes
        # can be added after construction.
        self._segment_path = lambda: "fru-info"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate attribute assignment to Entity._perform_setattr, passing the
        # leaf names that are declared settable on this class.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)


    class LastOperationalStateChange(Entity):
        """
        Time operational state is last changed

        .. attribute:: time_in_seconds

            Time Value in Seconds

            **type**\: int

            **range:** \-2147483648..2147483647

            **units**\: second

        .. attribute:: time_in_nano_seconds

            Time Value in Nano\-seconds

            **type**\: int

            **range:** \-2147483648..2147483647

            **units**\: nanosecond

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Build the last-operational-state-change container: two int32 leafs, no children."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()

            # Position of this node in the YANG schema tree.
            self.yang_name = "last-operational-state-change"
            self.yang_parent_name = "fru-info"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container

            # Leaf attribute -> (YLeaf descriptor, accepted python types).
            self._leafs = OrderedDict([
                ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
            ])
            # Leaf values start unset (None).
            self.time_in_seconds = None
            self.time_in_nano_seconds = None

            self._segment_path = lambda: "last-operational-state-change"
            self._is_frozen = True  # block unknown attributes from here on

        def __setattr__(self, name, value):
            # Route writes through Entity._perform_setattr with the settable leafs.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)


    class ModuleUpTime(Entity):
        """
        Module up time

        .. attribute:: time_in_seconds

            Time Value in Seconds

            **type**\: int

            **range:** \-2147483648..2147483647

            **units**\: second

        .. attribute:: time_in_nano_seconds

            Time Value in Nano\-seconds

            **type**\: int

            **range:** \-2147483648..2147483647

            **units**\: nanosecond

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Build the module-up-time container: two int32 leafs, no children."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()

            # Position of this node in the YANG schema tree.
            self.yang_name = "module-up-time"
            self.yang_parent_name = "fru-info"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container

            # Leaf attribute -> (YLeaf descriptor, accepted python types).
            self._leafs = OrderedDict([
                ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
            ])
            # Leaf values start unset (None).
            self.time_in_seconds = None
            self.time_in_nano_seconds = None

            self._segment_path = lambda: "module-up-time"
            self._is_frozen = True  # block unknown attributes from here on

        def __setattr__(self, name, value):
            # Route writes through Entity._perform_setattr with the settable leafs.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Attributes(Entity):
    """
    Attributes

    .. attribute:: basic_info

        Entity attributes

        **type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.BasicInfo>`

    .. attribute:: fru_info

        Field Replaceable Unit (FRU) attributes

        **type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo>`

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Build the attributes container: no leafs, two eagerly created child containers."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "attributes"
        self.yang_parent_name = "ports"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # container, not a keyed list

        # YANG child-container name -> (python attribute name, generated class).
        self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo))])
        self._leafs = OrderedDict()  # this container has no leafs of its own

        # Child containers are instantiated eagerly and parented to this node.
        self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.BasicInfo()
        self.basic_info.parent = self
        self._children_name_map["basic_info"] = "basic-info"

        self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo()
        self.fru_info.parent = self
        self._children_name_map["fru_info"] = "fru-info"

        self._segment_path = lambda: "attributes"
        self._is_frozen = True  # block unknown attributes from here on

    def __setattr__(self, name, value):
        # No settable leafs: pass an empty leaf-name list to _perform_setattr.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes, [], name, value)


    class BasicInfo(Entity):
        """
        Entity attributes

        .. attribute:: name

            name string for the entity

            **type**\: str

            **length:** 0..255

        .. attribute:: description

            describes in user\-readable terms what the entity in question does

            **type**\: str

            **length:** 0..255

        .. attribute:: model_name

            model name

            **type**\: str

            **length:** 0..255

        .. attribute:: hardware_revision

            hw revision string

            **type**\: str

            **length:** 0..255

        .. attribute:: serial_number

            serial number

            **type**\: str

            **length:** 0..255

        .. attribute:: firmware_revision

            firmware revision string

            **type**\: str

            **length:** 0..255

        .. attribute:: software_revision

            software revision string

            **type**\: str

            **length:** 0..255

        .. attribute:: vendor_type

            maps to the vendor OID string

            **type**\: str

            **length:** 0..255

        .. attribute:: is_field_replaceable_unit

            1 if Field Replaceable Unit 0, if not

            **type**\: bool

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Build the basic-info container: eight string leafs plus one boolean, no children."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.BasicInfo, self).__init__()

            # Position of this node in the YANG schema tree.
            self.yang_name = "basic-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container

            # Leaf attribute -> (YLeaf descriptor, accepted python types).
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('description', (YLeaf(YType.str, 'description'), ['str'])),
                ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
                ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
                ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
                ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
                ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
                ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
                ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
            ])

            # Leaf values start unset (None) until populated.
            self.name = None
            self.description = None
            self.model_name = None
            self.hardware_revision = None
            self.serial_number = None
            self.firmware_revision = None
            self.software_revision = None
            self.vendor_type = None
            self.is_field_replaceable_unit = None

            self._segment_path = lambda: "basic-info"
            self._is_frozen = True  # block unknown attributes from here on

        def __setattr__(self, name, value):
            # Route writes through Entity._perform_setattr with the settable leafs.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)


    class FruInfo(Entity):
        """
        Field Replaceable Unit (FRU) attributes

        .. attribute:: last_operational_state_change

            Time operational state is last changed

            **type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange>`

        .. attribute:: module_up_time

            Module up time

            **type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.ModuleUpTime>`

        .. attribute:: module_administrative_state

            Administrative state

            **type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`

        .. attribute:: module_power_administrative_state

            Power administrative state

            **type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`

        .. attribute:: module_operational_state

            Operation state

            **type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`

        .. attribute:: module_monitor_state

            Monitor state

            **type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`

        .. attribute:: module_reset_reason

            Reset reason

            **type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Build the fru-info container: five enum leafs plus two eagerly created child containers."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo, self).__init__()

            # Position of this node in the YANG schema tree.
            self.yang_name = "fru-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []

            # YANG child-container name -> (python attribute name, generated class).
            self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.ModuleUpTime))])

            # Leaf attribute -> (YLeaf descriptor, accepted type spec); all five
            # leafs here are enumerations declared in this same module.
            self._leafs = OrderedDict([
                ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
                ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
                ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
                ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
                ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
            ])

            # Leaf values start unset (None) until populated.
            self.module_administrative_state = None
            self.module_power_administrative_state = None
            self.module_operational_state = None
            self.module_monitor_state = None
            self.module_reset_reason = None

            # Child containers are instantiated eagerly and parented to this node.
            self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange()
            self.last_operational_state_change.parent = self
            self._children_name_map["last_operational_state_change"] = "last-operational-state-change"

            self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.ModuleUpTime()
            self.module_up_time.parent = self
            self._children_name_map["module_up_time"] = "module-up-time"

            self._segment_path = lambda: "fru-info"
            self._is_frozen = True  # block unknown attributes from here on

        def __setattr__(self, name, value):
            # Route writes through Entity._perform_setattr with the settable leafs.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)


        class LastOperationalStateChange(Entity):
            """
            Time operational state is last changed

            .. attribute:: time_in_seconds

                Time Value in Seconds

                **type**\: int

                **range:** \-2147483648..2147483647

                **units**\: second

            .. attribute:: time_in_nano_seconds

                Time Value in Nano\-seconds

                **type**\: int

                **range:** \-2147483648..2147483647

                **units**\: nanosecond

            """

            # YANG module prefix and revision this binding was generated from.
            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Build the last-operational-state-change container: two int32 leafs, no children."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange, self).__init__()

                # Position of this node in the YANG schema tree.
                self.yang_name = "last-operational-state-change"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container

                # Leaf attribute -> (YLeaf descriptor, accepted python types).
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                # Leaf values start unset (None).
                self.time_in_seconds = None
                self.time_in_nano_seconds = None

                self._segment_path = lambda: "last-operational-state-change"
                self._is_frozen = True  # block unknown attributes from here on

            def __setattr__(self, name, value):
                # Route writes through Entity._perform_setattr with the settable leafs.
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)


        class ModuleUpTime(Entity):
            """
            Module up time

            .. attribute:: time_in_seconds

                Time Value in Seconds

                **type**\: int

                **range:** \-2147483648..2147483647

                **units**\: second

            .. attribute:: time_in_nano_seconds

                Time Value in Nano\-seconds

                **type**\: int

                **range:** \-2147483648..2147483647

                **units**\: nanosecond

            """

            # YANG module prefix and revision this binding was generated from.
            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Build the module-up-time container: two int32 leafs, no children."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.ModuleUpTime, self).__init__()

                # Position of this node in the YANG schema tree.
                self.yang_name = "module-up-time"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container

                # Leaf attribute -> (YLeaf descriptor, accepted python types).
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                # Leaf values start unset (None).
                self.time_in_seconds = None
                self.time_in_nano_seconds = None

                self._segment_path = lambda: "module-up-time"
                self._is_frozen = True  # block unknown attributes from here on

            def __setattr__(self, name, value):
                # Route writes through Entity._perform_setattr with the settable leafs.
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Portses.Ports.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Sensors(Entity):
    """
    Table of sensors

    .. attribute:: sensor

        Sensor number

        **type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor>`

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Build the sensors container: no leafs, a single YList of Sensor entries."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "sensors"
        self.yang_parent_name = "port-slot"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # this node itself is a container, not a list

        # YANG child name -> (python attribute name, generated class).
        self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor))])
        self._leafs = OrderedDict()  # no leafs of its own

        # YANG list: zero or more Sensor entries, managed by YList.
        self.sensor = YList(self)

        self._segment_path = lambda: "sensors"
        self._is_frozen = True  # block unknown attributes from here on

    def __setattr__(self, name, value):
        # No settable leafs: pass an empty leaf-name list to _perform_setattr.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors, [], name, value)


    class Sensor(Entity):
        """
        Sensor number

        .. attribute:: name (key)

            Sensor name

            **type**\: str

            **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

        .. attribute:: attributes

            Attributes

            **type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes>`

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Build one sensor list entry: keyed by 'name', with one child container."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor, self).__init__()

            # Position of this node in the YANG schema tree.
            self.yang_name = "sensor"
            self.yang_parent_name = "sensors"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['name']  # YANG list key for this entry

            # YANG child-container name -> (python attribute name, generated class).
            self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes))])

            # Leaf attribute -> (YLeaf descriptor, accepted python types).
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
            ])
            self.name = None  # key leaf, unset until assigned

            # Child container is instantiated eagerly and parented to this node.
            self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes()
            self.attributes.parent = self
            self._children_name_map["attributes"] = "attributes"

            # Path segment embeds the key value, e.g. sensor[name='...'].
            self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
            self._is_frozen = True  # block unknown attributes from here on

        def __setattr__(self, name, value):
            # Route writes through Entity._perform_setattr; only the key leaf
            # 'name' is a settable leaf on this class.
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor, ['name'], name, value)


        class Attributes(Entity):
            """
            Attributes

            .. attribute:: basic_info

                Entity attributes

                **type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.BasicInfo>`

            .. attribute:: fru_info

                Field Replaceable Unit (FRU) attributes

                **type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo>`

            """

            # YANG module prefix and revision this binding was generated from.
            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Build the attributes container: no leafs, two eagerly created child containers."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes, self).__init__()

                # Position of this node in the YANG schema tree.
                self.yang_name = "attributes"
                self.yang_parent_name = "sensor"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []

                # YANG child-container name -> (python attribute name, generated class).
                self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo))])
                self._leafs = OrderedDict()  # no leafs of its own

                # Child containers are instantiated eagerly and parented to this node.
                self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.BasicInfo()
                self.basic_info.parent = self
                self._children_name_map["basic_info"] = "basic-info"

                self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo()
                self.fru_info.parent = self
                self._children_name_map["fru_info"] = "fru-info"

                self._segment_path = lambda: "attributes"
                self._is_frozen = True  # block unknown attributes from here on

            def __setattr__(self, name, value):
                # No settable leafs: pass an empty leaf-name list to _perform_setattr.
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes, [], name, value)


            class BasicInfo(Entity):
                """
                Entity attributes

                .. attribute:: name

                    name string for the entity

                    **type**\: str

                    **length:** 0..255

                .. attribute:: description

                    describes in user\-readable terms what the entity in question does

                    **type**\: str

                    **length:** 0..255

                .. attribute:: model_name

                    model name

                    **type**\: str

                    **length:** 0..255

                .. attribute:: hardware_revision

                    hw revision string

                    **type**\: str

                    **length:** 0..255

                .. attribute:: serial_number

                    serial number

                    **type**\: str

                    **length:** 0..255

                .. attribute:: firmware_revision

                    firmware revision string

                    **type**\: str

                    **length:** 0..255

                .. attribute:: software_revision

                    software revision string

                    **type**\: str

                    **length:** 0..255

                .. attribute:: vendor_type

                    maps to the vendor OID string

                    **type**\: str

                    **length:** 0..255

                .. attribute:: is_field_replaceable_unit

                    1 if Field Replaceable Unit 0, if not

                    **type**\: bool

                """

                # YANG module prefix and revision this binding was generated from.
                _prefix = 'plat-chas-invmgr-ng-oper'
                _revision = '2018-01-22'

                def __init__(self):
                    """Build the basic-info container: eight string leafs plus one boolean, no children."""
                    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.BasicInfo, self).__init__()

                    # Position of this node in the YANG schema tree.
                    self.yang_name = "basic-info"
                    self.yang_parent_name = "attributes"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])  # leaf-only container

                    # Leaf attribute -> (YLeaf descriptor, accepted python types).
                    self._leafs = OrderedDict([
                        ('name', (YLeaf(YType.str, 'name'), ['str'])),
                        ('description', (YLeaf(YType.str, 'description'), ['str'])),
                        ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
                        ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
                        ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
                        ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
                        ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
                        ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
                        ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
                    ])

                    # Leaf values start unset (None) until populated.
                    self.name = None
                    self.description = None
                    self.model_name = None
                    self.hardware_revision = None
                    self.serial_number = None
                    self.firmware_revision = None
                    self.software_revision = None
                    self.vendor_type = None
                    self.is_field_replaceable_unit = None

                    self._segment_path = lambda: "basic-info"
                    self._is_frozen = True  # block unknown attributes from here on

                def __setattr__(self, name, value):
                    # Route writes through Entity._perform_setattr with the settable leafs.
                    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)


            class FruInfo(Entity):
                """
                Field Replaceable Unit (FRU) attributes

                .. attribute:: last_operational_state_change

                    Time operational state is last changed

                    **type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`

                .. attribute:: module_up_time

                    Module up time

                    **type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`

                .. attribute:: module_administrative_state

                    Administrative state

                    **type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`

                .. attribute:: module_power_administrative_state

                    Power administrative state

                    **type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`

                .. attribute:: module_operational_state

                    Operation state

                    **type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`

                .. attribute:: module_monitor_state

                    Monitor state

                    **type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`

                .. attribute:: module_reset_reason

                    Reset reason

                    **type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`

                """

                # YANG module prefix and revision this binding was generated from.
                _prefix = 'plat-chas-invmgr-ng-oper'
                _revision = '2018-01-22'

                def __init__(self):
                    """Build the fru-info container: five enum leafs plus two eagerly created child containers."""
                    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo, self).__init__()

                    # Position of this node in the YANG schema tree.
                    self.yang_name = "fru-info"
                    self.yang_parent_name = "attributes"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []

                    # YANG child-container name -> (python attribute name, generated class).
                    self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])

                    # Leaf attribute -> (YLeaf descriptor, accepted type spec); all
                    # five leafs here are enumerations declared in this same module.
                    self._leafs = OrderedDict([
                        ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
                        ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
                        ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
                        ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
                        ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
                    ])

                    # Leaf values start unset (None) until populated.
                    self.module_administrative_state = None
                    self.module_power_administrative_state = None
                    self.module_operational_state = None
                    self.module_monitor_state = None
                    self.module_reset_reason = None

                    # Child containers are instantiated eagerly and parented to this node.
                    self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
                    self.last_operational_state_change.parent = self
                    self._children_name_map["last_operational_state_change"] = "last-operational-state-change"

                    self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
                    self.module_up_time.parent = self
                    self._children_name_map["module_up_time"] = "module-up-time"

                    self._segment_path = lambda: "fru-info"
                    self._is_frozen = True  # block unknown attributes from here on

                def __setattr__(self, name, value):
                    # Route writes through Entity._perform_setattr with the settable leafs.
                    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)


                class LastOperationalStateChange(Entity):
                    """
                    Time operational state is last changed

                    .. attribute:: time_in_seconds

                        Time Value in Seconds

                        **type**\: int

                        **range:** \-2147483648..2147483647

                        **units**\: second

                    .. attribute:: time_in_nano_seconds

                        Time Value in Nano\-seconds

                        **type**\: int

                        **range:** \-2147483648..2147483647

                        **units**\: nanosecond

                    """

                    # YANG module prefix and revision this binding was generated from.
                    _prefix = 'plat-chas-invmgr-ng-oper'
                    _revision = '2018-01-22'

                    def __init__(self):
                        """Build the last-operational-state-change container: two int32 leafs, no children."""
                        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()

                        # Position of this node in the YANG schema tree.
                        self.yang_name = "last-operational-state-change"
                        self.yang_parent_name = "fru-info"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])  # leaf-only container

                        # Leaf attribute -> (YLeaf descriptor, accepted python types).
                        self._leafs = OrderedDict([
                            ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                            ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                        ])
                        # Leaf values start unset (None).
                        self.time_in_seconds = None
                        self.time_in_nano_seconds = None

                        self._segment_path = lambda: "last-operational-state-change"
                        self._is_frozen = True  # block unknown attributes from here on

                    def __setattr__(self, name, value):
                        # Route writes through Entity._perform_setattr with the settable leafs.
                        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)


                class ModuleUpTime(Entity):
                    """
                    Module up time

                    .. attribute:: time_in_seconds

                        Time Value in Seconds

                        **type**\: int

                        **range:** \-2147483648..2147483647

                        **units**\: second

                    .. attribute:: time_in_nano_seconds

                        Time Value in Nano\-seconds

                        **type**\: int

                        **range:** \-2147483648..2147483647

                        **units**\: nanosecond

                    """

                    # YANG module prefix and revision this binding was generated from.
                    _prefix = 'plat-chas-invmgr-ng-oper'
                    _revision = '2018-01-22'

                    def __init__(self):
                        """Build the module-up-time container: two int32 leafs, no children."""
                        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()

                        # Position of this node in the YANG schema tree.
                        self.yang_name = "module-up-time"
                        self.yang_parent_name = "fru-info"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])  # leaf-only container

                        # Leaf attribute -> (YLeaf descriptor, accepted python types).
                        self._leafs = OrderedDict([
                            ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                            ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                        ])
                        # Leaf values start unset (None).
                        self.time_in_seconds = None
                        self.time_in_nano_seconds = None

                        self._segment_path = lambda: "module-up-time"
                        self._is_frozen = True  # block unknown attributes from here on

                    def __setattr__(self, name, value):
                        # Route writes through Entity._perform_setattr with the settable leafs.
                        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Auto-generated YDK entity for the 'attributes' container of a 'port-slot':
# groups the child containers 'basic-info' and 'fru-info'.
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata and instantiate the two child containers, then freeze.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes, self).__init__()
self.yang_name = "attributes"
self.yang_parent_name = "port-slot"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
# YANG child name -> (Python attribute name, implementing class).
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo))])
self._leafs = OrderedDict()  # no direct leaves on this container
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
self._segment_path = lambda: "attributes"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# No assignable leaves here; Entity._perform_setattr still validates the name.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes, [], name, value)
# Auto-generated YDK entity for 'basic-info': identity/inventory strings
# (name, description, revisions, serial number, ...) of a port-slot entity.
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, leaf registry and defaults, then freeze the entity.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
self._child_classes = OrderedDict([])  # leaf-only container: no children
# Attribute name -> (YLeaf descriptor, accepted Python types).
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the declared leaf names may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# Auto-generated YDK entity for 'fru-info': FRU state leaves (admin/power/
# operational/monitor state, reset reason) plus two timestamp child containers.
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, child/leaf registries and defaults, then freeze.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
# YANG child name -> (Python attribute name, implementing class).
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo.ModuleUpTime))])
# Enumeration leaves resolve their enum classes from the named module at runtime.
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
self._segment_path = lambda: "fru-info"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the declared leaf names may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
# Auto-generated YDK entity for 'last-operational-state-change': timestamp
# (seconds + nanoseconds) of the last operational-state transition.
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, leaf registry and defaults, then freeze the entity.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
self._child_classes = OrderedDict([])  # leaf-only container: no children
# Attribute name -> (YLeaf descriptor, accepted Python types).
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the declared leaf names may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Auto-generated YDK entity for the YANG container 'module-up-time'
# (leaf pair: module uptime in seconds + nanoseconds) under 'fru-info'.
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, leaf registry and defaults, then freeze the entity.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
self._child_classes = OrderedDict([])  # leaf-only container: no children
# Attribute name -> (YLeaf descriptor, accepted Python types).
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the declared leaf names may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.PortSlots.PortSlot.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Auto-generated YDK entity for 'hw-components': wrapper around the keyed
# YANG list 'hw-component' of a card.
class HwComponents(Entity):
"""
Table of HW components
.. attribute:: hw_component
HW component number
**type**\: list of :py:class:`HwComponent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata and the list attribute, then freeze the entity.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents, self).__init__()
self.yang_name = "hw-components"
self.yang_parent_name = "card"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
# YANG child name -> (Python attribute name, implementing class).
self._child_classes = OrderedDict([("hw-component", ("hw_component", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent))])
self._leafs = OrderedDict()  # no direct leaves on this container
self.hw_component = YList(self)  # YDK list of HwComponent entries
self._segment_path = lambda: "hw-components"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# No assignable leaves here; Entity._perform_setattr still validates the name.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents, [], name, value)
# Auto-generated YDK entity for one 'hw-component' list entry, keyed by
# 'name', with 'sensors' and 'attributes' child containers.
class HwComponent(Entity):
"""
HW component number
.. attribute:: name (key)
HW component name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: sensors
Table of sensors
**type**\: :py:class:`Sensors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors>`
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, key/leaf registries and child containers, then freeze.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent, self).__init__()
self.yang_name = "hw-component"
self.yang_parent_name = "hw-components"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = ['name']  # this list entry is keyed by 'name'
# YANG child name -> (Python attribute name, implementing class).
self._child_classes = OrderedDict([("sensors", ("sensors", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors)), ("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
])
self.name = None
self.sensors = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors()
self.sensors.parent = self
self._children_name_map["sensors"] = "sensors"
self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes()
self.attributes.parent = self
self._children_name_map["attributes"] = "attributes"
# List-entry path embeds the current 'name' key as an XPath predicate.
self._segment_path = lambda: "hw-component" + "[name='" + str(self.name) + "']"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the 'name' key leaf may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent, ['name'], name, value)
# Auto-generated YDK entity for 'sensors': wrapper around the keyed YANG
# list 'sensor' of a hw-component.
class Sensors(Entity):
"""
Table of sensors
.. attribute:: sensor
Sensor number
**type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata and the list attribute, then freeze the entity.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors, self).__init__()
self.yang_name = "sensors"
self.yang_parent_name = "hw-component"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
# YANG child name -> (Python attribute name, implementing class).
self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor))])
self._leafs = OrderedDict()  # no direct leaves on this container
self.sensor = YList(self)  # YDK list of Sensor entries
self._segment_path = lambda: "sensors"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# No assignable leaves here; Entity._perform_setattr still validates the name.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors, [], name, value)
# Auto-generated YDK entity for one 'sensor' list entry, keyed by 'name',
# with an 'attributes' child container.
class Sensor(Entity):
"""
Sensor number
.. attribute:: name (key)
Sensor name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: attributes
Attributes
**type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, key/leaf registries and the child container, then freeze.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor, self).__init__()
self.yang_name = "sensor"
self.yang_parent_name = "sensors"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = ['name']  # this list entry is keyed by 'name'
# YANG child name -> (Python attribute name, implementing class).
self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
])
self.name = None
self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes()
self.attributes.parent = self
self._children_name_map["attributes"] = "attributes"
# List-entry path embeds the current 'name' key as an XPath predicate.
self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the 'name' key leaf may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor, ['name'], name, value)
# Auto-generated YDK entity for the 'attributes' container of a 'sensor':
# groups the child containers 'basic-info' and 'fru-info'.
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata and instantiate the two child containers, then freeze.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes, self).__init__()
self.yang_name = "attributes"
self.yang_parent_name = "sensor"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
# YANG child name -> (Python attribute name, implementing class).
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo))])
self._leafs = OrderedDict()  # no direct leaves on this container
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
self._segment_path = lambda: "attributes"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# No assignable leaves here; Entity._perform_setattr still validates the name.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes, [], name, value)
# Auto-generated YDK entity for 'basic-info': identity/inventory strings
# (name, description, revisions, serial number, ...) of a sensor entity.
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, leaf registry and defaults, then freeze the entity.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
self._child_classes = OrderedDict([])  # leaf-only container: no children
# Attribute name -> (YLeaf descriptor, accepted Python types).
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the declared leaf names may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# Auto-generated YDK entity for 'fru-info': FRU state leaves (admin/power/
# operational/monitor state, reset reason) plus two timestamp child containers.
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, child/leaf registries and defaults, then freeze.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
# YANG child name -> (Python attribute name, implementing class).
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])
# Enumeration leaves resolve their enum classes from the named module at runtime.
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
self._segment_path = lambda: "fru-info"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the declared leaf names may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
# Auto-generated YDK entity for 'last-operational-state-change': timestamp
# (seconds + nanoseconds) of the last operational-state transition.
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, leaf registry and defaults, then freeze the entity.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
self._child_classes = OrderedDict([])  # leaf-only container: no children
# Attribute name -> (YLeaf descriptor, accepted Python types).
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the declared leaf names may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Auto-generated YDK entity for the YANG container 'module-up-time'
# (leaf pair: module uptime in seconds + nanoseconds) under 'fru-info'.
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, leaf registry and defaults, then freeze the entity.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
self._child_classes = OrderedDict([])  # leaf-only container: no children
# Attribute name -> (YLeaf descriptor, accepted Python types).
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the declared leaf names may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# Auto-generated YDK entity for the 'attributes' container of a 'hw-component':
# groups the child containers 'basic-info' and 'fru-info'.
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata and instantiate the two child containers, then freeze.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes, self).__init__()
self.yang_name = "attributes"
self.yang_parent_name = "hw-component"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
# YANG child name -> (Python attribute name, implementing class).
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo))])
self._leafs = OrderedDict()  # no direct leaves on this container
self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
self._segment_path = lambda: "attributes"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# No assignable leaves here; Entity._perform_setattr still validates the name.
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes, [], name, value)
# Auto-generated YDK entity for 'basic-info': identity/inventory strings
# (name, description, revisions, serial number, ...) of a hw-component entity.
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'  # YANG module prefix
_revision = '2018-01-22'  # revision date of the source YANG model
def __init__(self):
# Register YANG metadata, leaf registry and defaults, then freeze the entity.
super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
self.ylist_key_names = []
self._child_classes = OrderedDict([])  # leaf-only container: no children
# Attribute name -> (YLeaf descriptor, accepted Python types).
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
self._is_frozen = True  # no new attributes may be created after this point
def __setattr__(self, name, value):
# Only the declared leaf names may be assigned (enforced by Entity._perform_setattr).
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
class FruInfo(Entity):
    """
    Field Replaceable Unit (FRU) attributes
    .. attribute:: last_operational_state_change
    Time operational state is last changed
    **type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange>`
    .. attribute:: module_up_time
    Module up time
    **type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime>`
    .. attribute:: module_administrative_state
    Administrative state
    **type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
    .. attribute:: module_power_administrative_state
    Power administrative state
    **type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
    .. attribute:: module_operational_state
    Operation state
    **type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
    .. attribute:: module_monitor_state
    Monitor state
    **type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
    .. attribute:: module_reset_reason
    Reset reason
    **type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Set up generated YDK metadata for the fru-info YANG container."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo, self).__init__()
        self.yang_name = "fru-info"
        self.yang_parent_name = "attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Nested YANG containers: YANG name -> (python attribute name, class).
        self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime))])
        # Enumeration leafs: attribute name -> (YLeaf descriptor, (module, enum class, subname)).
        self._leafs = OrderedDict([
            ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
            ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
            ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
            ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
            ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
        ])
        # All leaf values start unset.
        self.module_administrative_state = None
        self.module_power_administrative_state = None
        self.module_operational_state = None
        self.module_monitor_state = None
        self.module_reset_reason = None
        # Pre-instantiate child containers, parent them here, and register
        # the python-name -> YANG-name mapping for each.
        self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange()
        self.last_operational_state_change.parent = self
        self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
        self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime()
        self.module_up_time.parent = self
        self._children_name_map["module_up_time"] = "module-up-time"
        self._segment_path = lambda: "fru-info"
        # Mark construction complete; must remain the last assignment here.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route writes through the Entity framework with this class's leaf names.
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)

    class LastOperationalStateChange(Entity):
        """
        Time operational state is last changed
        .. attribute:: time_in_seconds
        Time Value in Seconds
        **type**\: int
        **range:** \-2147483648..2147483647
        **units**\: second
        .. attribute:: time_in_nano_seconds
        Time Value in Nano\-seconds
        **type**\: int
        **range:** \-2147483648..2147483647
        **units**\: nanosecond
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Set up metadata for the last-operational-state-change container."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
            self.yang_name = "last-operational-state-change"
            self.yang_parent_name = "fru-info"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Two int32 leafs forming a seconds/nanoseconds timestamp pair.
            self._leafs = OrderedDict([
                ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
            ])
            self.time_in_seconds = None
            self.time_in_nano_seconds = None
            self._segment_path = lambda: "last-operational-state-change"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)

    class ModuleUpTime(Entity):
        """
        Module up time
        .. attribute:: time_in_seconds
        Time Value in Seconds
        **type**\: int
        **range:** \-2147483648..2147483647
        **units**\: second
        .. attribute:: time_in_nano_seconds
        Time Value in Nano\-seconds
        **type**\: int
        **range:** \-2147483648..2147483647
        **units**\: nanosecond
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Set up metadata for the module-up-time container."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime, self).__init__()
            self.yang_name = "module-up-time"
            self.yang_parent_name = "fru-info"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Two int32 leafs forming a seconds/nanoseconds duration pair.
            self._leafs = OrderedDict([
                ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
            ])
            self.time_in_seconds = None
            self.time_in_nano_seconds = None
            self._segment_path = lambda: "module-up-time"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.HwComponents.HwComponent.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Sensors(Entity):
    """
    Table of sensors
    .. attribute:: sensor
    Sensor number
    **type**\: list of :py:class:`Sensor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Set up generated YDK metadata for the sensors YANG container."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors, self).__init__()
        self.yang_name = "sensors"
        self.yang_parent_name = "card"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # One nested YANG list: "sensor" entries held in a YList below.
        self._child_classes = OrderedDict([("sensor", ("sensor", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor))])
        self._leafs = OrderedDict()
        self.sensor = YList(self)
        self._segment_path = lambda: "sensors"
        # Mark construction complete; must remain the last assignment here.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No settable leafs on this container (empty leaf-name list).
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors, [], name, value)

    class Sensor(Entity):
        """
        Sensor number
        .. attribute:: name (key)
        Sensor name
        **type**\: str
        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
        .. attribute:: attributes
        Attributes
        **type**\: :py:class:`Attributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes>`
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Set up metadata for one sensor list entry (keyed by name)."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor, self).__init__()
            self.yang_name = "sensor"
            self.yang_parent_name = "sensors"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # "name" is the YANG list key for sensor entries.
            self.ylist_key_names = ['name']
            self._child_classes = OrderedDict([("attributes", ("attributes", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes))])
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
            ])
            self.name = None
            self.attributes = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes()
            self.attributes.parent = self
            self._children_name_map["attributes"] = "attributes"
            # List-entry path embeds the key value as a YANG key predicate.
            self._segment_path = lambda: "sensor" + "[name='" + str(self.name) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor, ['name'], name, value)

        class Attributes(Entity):
            """
            Attributes
            .. attribute:: basic_info
            Entity attributes
            **type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.BasicInfo>`
            .. attribute:: fru_info
            Field Replaceable Unit (FRU) attributes
            **type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo>`
            """

            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Set up metadata for the attributes container of a sensor."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes, self).__init__()
                self.yang_name = "attributes"
                self.yang_parent_name = "sensor"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo))])
                self._leafs = OrderedDict()
                # Pre-instantiate and parent both child containers.
                self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.BasicInfo()
                self.basic_info.parent = self
                self._children_name_map["basic_info"] = "basic-info"
                self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo()
                self.fru_info.parent = self
                self._children_name_map["fru_info"] = "fru-info"
                self._segment_path = lambda: "attributes"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes, [], name, value)

            class BasicInfo(Entity):
                """
                Entity attributes
                .. attribute:: name
                name string for the entity
                **type**\: str
                **length:** 0..255
                .. attribute:: description
                describes in user\-readable terms what the entity in question does
                **type**\: str
                **length:** 0..255
                .. attribute:: model_name
                model name
                **type**\: str
                **length:** 0..255
                .. attribute:: hardware_revision
                hw revision string
                **type**\: str
                **length:** 0..255
                .. attribute:: serial_number
                serial number
                **type**\: str
                **length:** 0..255
                .. attribute:: firmware_revision
                firmware revision string
                **type**\: str
                **length:** 0..255
                .. attribute:: software_revision
                software revision string
                **type**\: str
                **length:** 0..255
                .. attribute:: vendor_type
                maps to the vendor OID string
                **type**\: str
                **length:** 0..255
                .. attribute:: is_field_replaceable_unit
                1 if Field Replaceable Unit 0, if not
                **type**\: bool
                """

                _prefix = 'plat-chas-invmgr-ng-oper'
                _revision = '2018-01-22'

                def __init__(self):
                    """Set up metadata for the basic-info container of a sensor."""
                    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.BasicInfo, self).__init__()
                    self.yang_name = "basic-info"
                    self.yang_parent_name = "attributes"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    # String identity leafs plus one boolean FRU flag.
                    self._leafs = OrderedDict([
                        ('name', (YLeaf(YType.str, 'name'), ['str'])),
                        ('description', (YLeaf(YType.str, 'description'), ['str'])),
                        ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
                        ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
                        ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
                        ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
                        ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
                        ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
                        ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
                    ])
                    self.name = None
                    self.description = None
                    self.model_name = None
                    self.hardware_revision = None
                    self.serial_number = None
                    self.firmware_revision = None
                    self.software_revision = None
                    self.vendor_type = None
                    self.is_field_replaceable_unit = None
                    self._segment_path = lambda: "basic-info"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)

            class FruInfo(Entity):
                """
                Field Replaceable Unit (FRU) attributes
                .. attribute:: last_operational_state_change
                Time operational state is last changed
                **type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange>`
                .. attribute:: module_up_time
                Module up time
                **type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime>`
                .. attribute:: module_administrative_state
                Administrative state
                **type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
                .. attribute:: module_power_administrative_state
                Power administrative state
                **type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
                .. attribute:: module_operational_state
                Operation state
                **type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
                .. attribute:: module_monitor_state
                Monitor state
                **type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
                .. attribute:: module_reset_reason
                Reset reason
                **type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
                """

                _prefix = 'plat-chas-invmgr-ng-oper'
                _revision = '2018-01-22'

                def __init__(self):
                    """Set up metadata for the fru-info container of a sensor."""
                    super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo, self).__init__()
                    self.yang_name = "fru-info"
                    self.yang_parent_name = "attributes"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime))])
                    # Enumeration leafs resolved from the generated module at runtime.
                    self._leafs = OrderedDict([
                        ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
                        ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
                        ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
                        ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
                        ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
                    ])
                    self.module_administrative_state = None
                    self.module_power_administrative_state = None
                    self.module_operational_state = None
                    self.module_monitor_state = None
                    self.module_reset_reason = None
                    # Pre-instantiate and parent the two timestamp child containers.
                    self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange()
                    self.last_operational_state_change.parent = self
                    self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
                    self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime()
                    self.module_up_time.parent = self
                    self._children_name_map["module_up_time"] = "module-up-time"
                    self._segment_path = lambda: "fru-info"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)

                class LastOperationalStateChange(Entity):
                    """
                    Time operational state is last changed
                    .. attribute:: time_in_seconds
                    Time Value in Seconds
                    **type**\: int
                    **range:** \-2147483648..2147483647
                    **units**\: second
                    .. attribute:: time_in_nano_seconds
                    Time Value in Nano\-seconds
                    **type**\: int
                    **range:** \-2147483648..2147483647
                    **units**\: nanosecond
                    """

                    _prefix = 'plat-chas-invmgr-ng-oper'
                    _revision = '2018-01-22'

                    def __init__(self):
                        """Set up metadata for the last-operational-state-change container."""
                        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
                        self.yang_name = "last-operational-state-change"
                        self.yang_parent_name = "fru-info"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        # Two int32 leafs forming a seconds/nanoseconds timestamp pair.
                        self._leafs = OrderedDict([
                            ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                            ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                        ])
                        self.time_in_seconds = None
                        self.time_in_nano_seconds = None
                        self._segment_path = lambda: "last-operational-state-change"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)

                class ModuleUpTime(Entity):
                    """
                    Module up time
                    .. attribute:: time_in_seconds
                    Time Value in Seconds
                    **type**\: int
                    **range:** \-2147483648..2147483647
                    **units**\: second
                    .. attribute:: time_in_nano_seconds
                    Time Value in Nano\-seconds
                    **type**\: int
                    **range:** \-2147483648..2147483647
                    **units**\: nanosecond
                    """

                    _prefix = 'plat-chas-invmgr-ng-oper'
                    _revision = '2018-01-22'

                    def __init__(self):
                        """Set up metadata for the module-up-time container."""
                        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, self).__init__()
                        self.yang_name = "module-up-time"
                        self.yang_parent_name = "fru-info"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        # Two int32 leafs forming a seconds/nanoseconds duration pair.
                        self._leafs = OrderedDict([
                            ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                            ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                        ])
                        self.time_in_seconds = None
                        self.time_in_nano_seconds = None
                        self._segment_path = lambda: "module-up-time"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Sensors.Sensor.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Attributes(Entity):
    """
    Attributes
    .. attribute:: basic_info
    Entity attributes
    **type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.BasicInfo>`
    .. attribute:: fru_info
    Field Replaceable Unit (FRU) attributes
    **type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'plat-chas-invmgr-ng-oper'
    _revision = '2018-01-22'

    def __init__(self):
        """Set up generated YDK metadata for the card-level attributes container."""
        super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes, self).__init__()
        self.yang_name = "attributes"
        self.yang_parent_name = "card"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Nested YANG containers: YANG name -> (python attribute name, class).
        self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo))])
        self._leafs = OrderedDict()
        # Pre-instantiate and parent both child containers.
        self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.BasicInfo()
        self.basic_info.parent = self
        self._children_name_map["basic_info"] = "basic-info"
        self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo()
        self.fru_info.parent = self
        self._children_name_map["fru_info"] = "fru-info"
        self._segment_path = lambda: "attributes"
        # Mark construction complete; must remain the last assignment here.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No settable leafs on this container (empty leaf-name list).
        self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes, [], name, value)

    class BasicInfo(Entity):
        """
        Entity attributes
        .. attribute:: name
        name string for the entity
        **type**\: str
        **length:** 0..255
        .. attribute:: description
        describes in user\-readable terms what the entity in question does
        **type**\: str
        **length:** 0..255
        .. attribute:: model_name
        model name
        **type**\: str
        **length:** 0..255
        .. attribute:: hardware_revision
        hw revision string
        **type**\: str
        **length:** 0..255
        .. attribute:: serial_number
        serial number
        **type**\: str
        **length:** 0..255
        .. attribute:: firmware_revision
        firmware revision string
        **type**\: str
        **length:** 0..255
        .. attribute:: software_revision
        software revision string
        **type**\: str
        **length:** 0..255
        .. attribute:: vendor_type
        maps to the vendor OID string
        **type**\: str
        **length:** 0..255
        .. attribute:: is_field_replaceable_unit
        1 if Field Replaceable Unit 0, if not
        **type**\: bool
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Set up metadata for the basic-info container of a card."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.BasicInfo, self).__init__()
            self.yang_name = "basic-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # String identity leafs plus one boolean FRU flag.
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('description', (YLeaf(YType.str, 'description'), ['str'])),
                ('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
                ('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
                ('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
                ('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
                ('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
                ('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
                ('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
            ])
            self.name = None
            self.description = None
            self.model_name = None
            self.hardware_revision = None
            self.serial_number = None
            self.firmware_revision = None
            self.software_revision = None
            self.vendor_type = None
            self.is_field_replaceable_unit = None
            self._segment_path = lambda: "basic-info"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)

    class FruInfo(Entity):
        """
        Field Replaceable Unit (FRU) attributes
        .. attribute:: last_operational_state_change
        Time operational state is last changed
        **type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo.LastOperationalStateChange>`
        .. attribute:: module_up_time
        Module up time
        **type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo.ModuleUpTime>`
        .. attribute:: module_administrative_state
        Administrative state
        **type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
        .. attribute:: module_power_administrative_state
        Power administrative state
        **type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
        .. attribute:: module_operational_state
        Operation state
        **type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
        .. attribute:: module_monitor_state
        Monitor state
        **type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
        .. attribute:: module_reset_reason
        Reset reason
        **type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
        """

        _prefix = 'plat-chas-invmgr-ng-oper'
        _revision = '2018-01-22'

        def __init__(self):
            """Set up metadata for the fru-info container of a card."""
            super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo, self).__init__()
            self.yang_name = "fru-info"
            self.yang_parent_name = "attributes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo.ModuleUpTime))])
            # Enumeration leafs resolved from the generated module at runtime.
            self._leafs = OrderedDict([
                ('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
                ('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
                ('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
                ('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
                ('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
            ])
            self.module_administrative_state = None
            self.module_power_administrative_state = None
            self.module_operational_state = None
            self.module_monitor_state = None
            self.module_reset_reason = None
            # Pre-instantiate and parent the two timestamp child containers.
            self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo.LastOperationalStateChange()
            self.last_operational_state_change.parent = self
            self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
            self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo.ModuleUpTime()
            self.module_up_time.parent = self
            self._children_name_map["module_up_time"] = "module-up-time"
            self._segment_path = lambda: "fru-info"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)

        class LastOperationalStateChange(Entity):
            """
            Time operational state is last changed
            .. attribute:: time_in_seconds
            Time Value in Seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: second
            .. attribute:: time_in_nano_seconds
            Time Value in Nano\-seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: nanosecond
            """

            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Set up metadata for the last-operational-state-change container."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
                self.yang_name = "last-operational-state-change"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Two int32 leafs forming a seconds/nanoseconds timestamp pair.
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "last-operational-state-change"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)

        class ModuleUpTime(Entity):
            """
            Module up time
            .. attribute:: time_in_seconds
            Time Value in Seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: second
            .. attribute:: time_in_nano_seconds
            Time Value in Nano\-seconds
            **type**\: int
            **range:** \-2147483648..2147483647
            **units**\: nanosecond
            """

            _prefix = 'plat-chas-invmgr-ng-oper'
            _revision = '2018-01-22'

            def __init__(self):
                """Set up metadata for the module-up-time container."""
                super(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo.ModuleUpTime, self).__init__()
                self.yang_name = "module-up-time"
                self.yang_parent_name = "fru-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Two int32 leafs forming a seconds/nanoseconds duration pair.
                self._leafs = OrderedDict([
                    ('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
                    ('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
                ])
                self.time_in_seconds = None
                self.time_in_nano_seconds = None
                self._segment_path = lambda: "module-up-time"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Cards.Card.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
    """Set up generated YDK metadata for the slot-level attributes container."""
    super(PlatformInventory.Racks.Rack.Slots.Slot.Attributes, self).__init__()
    self.yang_name = "attributes"
    self.yang_parent_name = "slot"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Nested YANG containers: YANG name -> (python attribute name, class).
    self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Slots.Slot.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo))])
    self._leafs = OrderedDict()
    # Pre-instantiate and parent both child containers.
    self.basic_info = PlatformInventory.Racks.Rack.Slots.Slot.Attributes.BasicInfo()
    self.basic_info.parent = self
    self._children_name_map["basic_info"] = "basic-info"
    self.fru_info = PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo()
    self.fru_info.parent = self
    self._children_name_map["fru_info"] = "fru-info"
    self._segment_path = lambda: "attributes"
    # Mark construction complete; must remain the last assignment here.
    self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Attributes, [], name, value)
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo.ModuleUpTime))])
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
self.last_operational_state_change = PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
self._segment_path = lambda: "fru-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Slots.Slot.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# YDK binding for the YANG container "attributes" under
# platform-inventory/racks/rack (module Cisco-IOS-XR-plat-chas-invmgr-ng-oper).
class Attributes(Entity):
"""
Attributes
.. attribute:: basic_info
Entity attributes
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Attributes.BasicInfo>`
.. attribute:: fru_info
Field Replaceable Unit (FRU) attributes
**type**\: :py:class:`FruInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Attributes.FruInfo>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Attributes, self).__init__()
self.yang_name = "attributes"
self.yang_parent_name = "rack"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child containers: YANG name -> (python attribute name, binding class).
self._child_classes = OrderedDict([("basic-info", ("basic_info", PlatformInventory.Racks.Rack.Attributes.BasicInfo)), ("fru-info", ("fru_info", PlatformInventory.Racks.Rack.Attributes.FruInfo))])
# This container carries no leafs of its own, only the two children below.
self._leafs = OrderedDict()
self.basic_info = PlatformInventory.Racks.Rack.Attributes.BasicInfo()
self.basic_info.parent = self
self._children_name_map["basic_info"] = "basic-info"
self.fru_info = PlatformInventory.Racks.Rack.Attributes.FruInfo()
self.fru_info.parent = self
self._children_name_map["fru_info"] = "fru-info"
# Path segment appended to the parent's path when addressing this node.
self._segment_path = lambda: "attributes"
self._is_frozen = True
def __setattr__(self, name, value):
# Route attribute writes through YDK's validation hook (empty list: no settable leafs here).
self._perform_setattr(PlatformInventory.Racks.Rack.Attributes, [], name, value)
# "basic-info" container: inventory identification strings for the rack entity.
class BasicInfo(Entity):
"""
Entity attributes
.. attribute:: name
name string for the entity
**type**\: str
**length:** 0..255
.. attribute:: description
describes in user\-readable terms what the entity in question does
**type**\: str
**length:** 0..255
.. attribute:: model_name
model name
**type**\: str
**length:** 0..255
.. attribute:: hardware_revision
hw revision string
**type**\: str
**length:** 0..255
.. attribute:: serial_number
serial number
**type**\: str
**length:** 0..255
.. attribute:: firmware_revision
firmware revision string
**type**\: str
**length:** 0..255
.. attribute:: software_revision
software revision string
**type**\: str
**length:** 0..255
.. attribute:: vendor_type
maps to the vendor OID string
**type**\: str
**length:** 0..255
.. attribute:: is_field_replaceable_unit
1 if Field Replaceable Unit 0, if not
**type**\: bool
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Attributes.BasicInfo, self).__init__()
self.yang_name = "basic-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
# Leaf metadata: python attribute -> (YLeaf descriptor, accepted python types).
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('model_name', (YLeaf(YType.str, 'model-name'), ['str'])),
('hardware_revision', (YLeaf(YType.str, 'hardware-revision'), ['str'])),
('serial_number', (YLeaf(YType.str, 'serial-number'), ['str'])),
('firmware_revision', (YLeaf(YType.str, 'firmware-revision'), ['str'])),
('software_revision', (YLeaf(YType.str, 'software-revision'), ['str'])),
('vendor_type', (YLeaf(YType.str, 'vendor-type'), ['str'])),
('is_field_replaceable_unit', (YLeaf(YType.boolean, 'is-field-replaceable-unit'), ['bool'])),
])
# Leaf values default to None (unset) until populated by a read or the user.
self.name = None
self.description = None
self.model_name = None
self.hardware_revision = None
self.serial_number = None
self.firmware_revision = None
self.software_revision = None
self.vendor_type = None
self.is_field_replaceable_unit = None
self._segment_path = lambda: "basic-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Attributes.BasicInfo, [u'name', u'description', u'model_name', u'hardware_revision', u'serial_number', u'firmware_revision', u'software_revision', u'vendor_type', u'is_field_replaceable_unit'], name, value)
# "fru-info" container: FRU state leafs plus two timestamp child containers.
class FruInfo(Entity):
"""
Field Replaceable Unit (FRU) attributes
.. attribute:: last_operational_state_change
Time operational state is last changed
**type**\: :py:class:`LastOperationalStateChange <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Attributes.FruInfo.LastOperationalStateChange>`
.. attribute:: module_up_time
Module up time
**type**\: :py:class:`ModuleUpTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.PlatformInventory.Racks.Rack.Attributes.FruInfo.ModuleUpTime>`
.. attribute:: module_administrative_state
Administrative state
**type**\: :py:class:`InvAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvAdminState>`
.. attribute:: module_power_administrative_state
Power administrative state
**type**\: :py:class:`InvPowerAdminState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvPowerAdminState>`
.. attribute:: module_operational_state
Operation state
**type**\: :py:class:`InvCardState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvCardState>`
.. attribute:: module_monitor_state
Monitor state
**type**\: :py:class:`InvMonitorState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvMonitorState>`
.. attribute:: module_reset_reason
Reset reason
**type**\: :py:class:`InvResetReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper.InvResetReason>`
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Attributes.FruInfo, self).__init__()
self.yang_name = "fru-info"
self.yang_parent_name = "attributes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("last-operational-state-change", ("last_operational_state_change", PlatformInventory.Racks.Rack.Attributes.FruInfo.LastOperationalStateChange)), ("module-up-time", ("module_up_time", PlatformInventory.Racks.Rack.Attributes.FruInfo.ModuleUpTime))])
# Enumeration leafs resolve their enum classes lazily via (module, class, nested) tuples.
self._leafs = OrderedDict([
('module_administrative_state', (YLeaf(YType.enumeration, 'module-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvAdminState', '')])),
('module_power_administrative_state', (YLeaf(YType.enumeration, 'module-power-administrative-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvPowerAdminState', '')])),
('module_operational_state', (YLeaf(YType.enumeration, 'module-operational-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvCardState', '')])),
('module_monitor_state', (YLeaf(YType.enumeration, 'module-monitor-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvMonitorState', '')])),
('module_reset_reason', (YLeaf(YType.enumeration, 'module-reset-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_plat_chas_invmgr_ng_oper', 'InvResetReason', '')])),
])
self.module_administrative_state = None
self.module_power_administrative_state = None
self.module_operational_state = None
self.module_monitor_state = None
self.module_reset_reason = None
self.last_operational_state_change = PlatformInventory.Racks.Rack.Attributes.FruInfo.LastOperationalStateChange()
self.last_operational_state_change.parent = self
self._children_name_map["last_operational_state_change"] = "last-operational-state-change"
self.module_up_time = PlatformInventory.Racks.Rack.Attributes.FruInfo.ModuleUpTime()
self.module_up_time.parent = self
self._children_name_map["module_up_time"] = "module-up-time"
self._segment_path = lambda: "fru-info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Attributes.FruInfo, [u'module_administrative_state', u'module_power_administrative_state', u'module_operational_state', u'module_monitor_state', u'module_reset_reason'], name, value)
# "last-operational-state-change" container: seconds/nanoseconds timestamp pair.
class LastOperationalStateChange(Entity):
"""
Time operational state is last changed
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Attributes.FruInfo.LastOperationalStateChange, self).__init__()
self.yang_name = "last-operational-state-change"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "last-operational-state-change"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Attributes.FruInfo.LastOperationalStateChange, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
# "module-up-time" container: seconds/nanoseconds uptime value.
class ModuleUpTime(Entity):
"""
Module up time
.. attribute:: time_in_seconds
Time Value in Seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: second
.. attribute:: time_in_nano_seconds
Time Value in Nano\-seconds
**type**\: int
**range:** \-2147483648..2147483647
**units**\: nanosecond
"""
_prefix = 'plat-chas-invmgr-ng-oper'
_revision = '2018-01-22'
def __init__(self):
super(PlatformInventory.Racks.Rack.Attributes.FruInfo.ModuleUpTime, self).__init__()
self.yang_name = "module-up-time"
self.yang_parent_name = "fru-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_seconds', (YLeaf(YType.int32, 'time-in-seconds'), ['int'])),
('time_in_nano_seconds', (YLeaf(YType.int32, 'time-in-nano-seconds'), ['int'])),
])
self.time_in_seconds = None
self.time_in_nano_seconds = None
self._segment_path = lambda: "module-up-time"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PlatformInventory.Racks.Rack.Attributes.FruInfo.ModuleUpTime, [u'time_in_seconds', u'time_in_nano_seconds'], name, value)
def clone_ptr(self):
# Create a fresh top-level PlatformInventory entity, cache it as the top entity, and return it.
self._top_entity = PlatformInventory()
return self._top_entity
| 74.539836
| 746
| 0.337012
| 55,356
| 946,805
| 5.482224
| 0.007009
| 0.027343
| 0.029986
| 0.052136
| 0.969763
| 0.961984
| 0.956049
| 0.953103
| 0.952134
| 0.950414
| 0
| 0.013882
| 0.595995
| 946,805
| 12,701
| 747
| 74.545705
| 0.779485
| 0.144453
| 0
| 0.817267
| 0
| 0.000604
| 0.112135
| 0.048493
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078487
| false
| 0.000403
| 0.001006
| 0
| 0.142685
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
295ce479ef7a3c20744f0b55c560277ad1dbb51c
| 7,664
|
py
|
Python
|
api/test_apiutils.py
|
jbalint/aurum-datadiscovery
|
443b777c13be47e6da60b4af528e19a02608e158
|
[
"MIT"
] | 60
|
2017-01-13T19:53:34.000Z
|
2022-01-06T07:00:43.000Z
|
api/test_apiutils.py
|
jbalint/aurum-datadiscovery
|
443b777c13be47e6da60b4af528e19a02608e158
|
[
"MIT"
] | 83
|
2016-10-08T14:07:55.000Z
|
2022-03-08T22:03:39.000Z
|
api/test_apiutils.py
|
jbalint/aurum-datadiscovery
|
443b777c13be47e6da60b4af528e19a02608e158
|
[
"MIT"
] | 44
|
2016-10-08T00:15:46.000Z
|
2021-12-31T02:42:45.000Z
|
import unittest
from api.apiutils import DRS
from api.apiutils import Operation
from api.apiutils import OP
from api.apiutils import Hit
from ddapi import API
class TestApiutils(unittest.TestCase):
    """Tests for DRS iteration modes, provenance tracking and set operations.

    Fixes over the previous version: the iteration and provenance tests now
    make real assertions instead of ``assertTrue(True)``, comparisons use
    ``assertEqual``, and the repeated Hit fixtures are factored into helpers.
    """

    api = API(None)

    # --- fixtures and helpers -------------------------------------------

    @staticmethod
    def _origin_hits():
        """Four hits spread over two tables ('table_a', 'table_b') of schema 'dba'."""
        return [
            Hit(0, "dba", "table_a", "a", -1),
            Hit(1, "dba", "table_a", "b", -1),
            Hit(2, "dba", "table_b", "c", -1),
            Hit(3, "dba", "table_b", "d", -1),
        ]

    @staticmethod
    def _other_hits():
        """Two hits in a third table, 'table_d'."""
        return [
            Hit(16, "dba", "table_d", "a", -1),
            Hit(17, "dba", "table_d", "b", -1),
        ]

    @staticmethod
    def _dump_provenance(drs):
        """Print the provenance graph of *drs* (debug aid) and return it."""
        graph = drs.get_provenance().prov_graph()
        print("NODES")
        for node in graph.nodes():
            print(str(node))
        print(" ")
        print("EDGES")
        for edge in graph.edges(keys=True):
            print(str(edge))
        print(" ")
        return graph

    def _set_op_operands(self):
        """Two ORIGIN DRSs sharing exactly one hit (nid 1, table_a.b)."""
        h0 = Hit(10, "dba", "table_c", "v", -1)
        drs1 = DRS([h0] + self._origin_hits(), Operation(OP.ORIGIN))
        h5 = Hit(1, "dba", "table_a", "b", -1)
        drs2 = DRS([h5] + self._other_hits(), Operation(OP.ORIGIN))
        return drs1, drs2

    # --- iteration modes -------------------------------------------------

    def test_drs_field_iteration(self):
        print(self._testMethodName)
        drs = DRS(self._origin_hits(), Operation(OP.ORIGIN))
        drs.set_fields_mode()
        # Field mode yields one element per hit: the fixture has 4 fields.
        self.assertEqual(len([el for el in drs]), 4)

    def test_drs_table_iteration(self):
        print(self._testMethodName)
        drs = DRS(self._origin_hits(), Operation(OP.ORIGIN))
        drs.set_table_mode()
        # Table mode deduplicates: the 4 hits live in exactly 2 tables.
        self.assertEqual(len([el for el in drs]), 2)

    # --- provenance -------------------------------------------------------

    def test_creation_initial_provenance(self):
        print(self._testMethodName)
        h0 = Hit(10, "dba", "table_c", "v", -1)
        drs = DRS(self._origin_hits(), Operation(OP.CONTENT_SIM, params=[h0]))
        graph = self._dump_provenance(drs)
        # The CONTENT_SIM operation must record the source and its results.
        self.assertGreater(len(graph.nodes()), 0)
        self.assertGreater(len(graph.edges(keys=True)), 0)

    def test_absorb_provenance(self):
        print(self._testMethodName)
        h0 = Hit(10, "dba", "table_c", "v", -1)
        drs1 = DRS(self._origin_hits(), Operation(OP.CONTENT_SIM, params=[h0]))
        h5 = Hit(1, "dba", "table_a", "b", -1)
        drs2 = DRS(self._other_hits(), Operation(OP.SCHEMA_SIM, params=[h5]))
        drs = drs1.absorb_provenance(drs2)
        self._dump_provenance(drs)
        # Absorbing only provenance must not drop any of drs1's data.
        init_data = set(drs1)
        merged_data = set(drs)
        self.assertEqual(init_data - merged_data, set())

    def test_absorb(self):
        print(self._testMethodName)
        h0 = Hit(10, "dba", "table_c", "v", -1)
        drs1 = DRS(self._origin_hits(), Operation(OP.CONTENT_SIM, params=[h0]))
        h5 = Hit(1, "dba", "table_a", "b", -1)
        drs2 = DRS(self._other_hits(), Operation(OP.SCHEMA_SIM, params=[h5]))
        drs = drs1.absorb(drs2)
        self._dump_provenance(drs)
        # A full absorb keeps the union of both data sets.
        self.assertEqual(len(set(drs)), len(set(drs1) | set(drs2)))

    # --- set operations ----------------------------------------------------

    def test_intersection(self):
        print(self._testMethodName)
        drs1, drs2 = self._set_op_operands()
        drs = drs1.intersection(drs2)
        self._dump_provenance(drs)
        # Only the single shared hit survives.
        self.assertEqual(len([x for x in drs]), 1)

    def test_union(self):
        print(self._testMethodName)
        drs1, drs2 = self._set_op_operands()
        drs = drs1.union(drs2)
        self._dump_provenance(drs)
        # 5 + 3 hits with one duplicate -> 7 distinct elements.
        self.assertEqual(len([x for x in drs]), 7)

    def test_sdifference(self):
        print(self._testMethodName)
        drs1, drs2 = self._set_op_operands()
        drs = drs1.set_difference(drs2)
        self._dump_provenance(drs)
        # 5 hits minus the shared one -> 4 remain.
        self.assertEqual(len([x for x in drs]), 4)
# Allow running this test module directly: `python test_apiutils.py`.
if __name__ == "__main__":
unittest.main()
| 27.768116
| 76
| 0.485517
| 1,093
| 7,664
| 3.278134
| 0.083257
| 0.118337
| 0.052749
| 0.043539
| 0.862406
| 0.824728
| 0.820821
| 0.798493
| 0.785654
| 0.785654
| 0
| 0.053544
| 0.329854
| 7,664
| 275
| 77
| 27.869091
| 0.644081
| 0.007698
| 0
| 0.785714
| 0
| 0
| 0.097182
| 0
| 0
| 0
| 0
| 0
| 0.040816
| 1
| 0.040816
| false
| 0
| 0.030612
| 0
| 0.081633
| 0.260204
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
296bee64cc9a88240493e8dc790aaad6b2097da4
| 402
|
py
|
Python
|
tests/test_window.py
|
jabbalaci/jabbapylib3
|
ddc8fe88b89c4379254183b9a7c1405574a3a262
|
[
"MIT"
] | 6
|
2017-03-31T16:58:52.000Z
|
2019-05-11T20:12:07.000Z
|
tests/test_window.py
|
jabbalaci/jabbapylib3
|
ddc8fe88b89c4379254183b9a7c1405574a3a262
|
[
"MIT"
] | null | null | null |
tests/test_window.py
|
jabbalaci/jabbapylib3
|
ddc8fe88b89c4379254183b9a7c1405574a3a262
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from jplib import fs, window
def test_get_window_title_by_id():
    """The xwininfo binary required for window-title lookup must be installed."""
    # TODO: exercise the real lookup once a test X session is available
    tool_path = fs.which('xwininfo')
    assert tool_path is not None
def test_get_active_window_id():
    """The xdotool binary required for active-window queries must be installed."""
    # TODO: exercise the real query once a test X session is available
    tool_path = fs.which('xdotool')
    assert tool_path is not None
def test_activate_window_by_id():
    """The xdotool binary required for window activation must be installed."""
    # TODO: exercise the real activation once a test X session is available
    tool_path = fs.which('xdotool')
    assert tool_path is not None
def test_toggle_fullscreen():
    """The wmctrl binary required for toggling fullscreen must be installed."""
    # TODO: exercise the real toggle once a test X session is available
    tool_path = fs.which('wmctrl')
    assert tool_path is not None
| 16.75
| 43
| 0.689055
| 63
| 402
| 4.15873
| 0.428571
| 0.10687
| 0.183206
| 0.259542
| 0.469466
| 0.408397
| 0.320611
| 0.320611
| 0.320611
| 0.320611
| 0
| 0.003155
| 0.211443
| 402
| 23
| 44
| 17.478261
| 0.823344
| 0.08209
| 0
| 0.222222
| 0
| 0
| 0.077135
| 0
| 0
| 0
| 0
| 0.043478
| 0.444444
| 1
| 0.444444
| true
| 0
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
29818e9f6206f504cea4baa054004a7a00216e36
| 13,558
|
py
|
Python
|
src/models/hnn.py
|
ngruver/decon-hnn
|
6e6c7e9962568214e1708fb933b715a39328fc7b
|
[
"Apache-2.0"
] | 6
|
2022-02-14T04:52:59.000Z
|
2022-03-08T05:11:34.000Z
|
src/models/hnn.py
|
ngruver/decon-hnn
|
6e6c7e9962568214e1708fb933b715a39328fc7b
|
[
"Apache-2.0"
] | null | null | null |
src/models/hnn.py
|
ngruver/decon-hnn
|
6e6c7e9962568214e1708fb933b715a39328fc7b
|
[
"Apache-2.0"
] | null | null | null |
import sys
import torch
from torch import Tensor
import torch.nn as nn
import numpy as np
from torchdiffeq import odeint
from .utils import FCsoftplus, FCtanh, FCswish, Reshape, Linear, CosSin
from ..dynamics.hamiltonian import HamiltonianDynamics, mHamiltonianDynamics, GeneralizedT
from typing import Tuple
def get_linear_layers(net):
    """Collect the nn.Linear modules of *net*, descending one level into
    any nn.Sequential children (deeper nesting is not traversed)."""
    found = []
    for child in net:
        if isinstance(child, nn.Linear):
            found.append(child)
        elif isinstance(child, nn.Sequential):
            found.extend(m for m in child if isinstance(m, nn.Linear))
    return found
class HNN(nn.Module):
def __init__(
self,
G,
dof_ndim: int = 1,
hidden_size: int = 256,
num_layers: int = 2,
canonical: bool = False,
angular_dims: Tuple = tuple(),
wgrad: bool = True,
**kwargs
):
"""Hamiltonian neural network over a dof_ndim-dimensional configuration space.

Args:
    G: unused in this constructor body — presumably the system graph/topology
       passed by the trainer; TODO confirm against callers.
    dof_ndim: number of configuration degrees of freedom (q dimension).
    hidden_size: width of each hidden layer in the potential and mass nets.
    num_layers: number of hidden layers in each net.
    canonical: if True, inputs to integrate() are already (q, p); otherwise (q, qdot).
    angular_dims: indices of q that are angles, embedded as (cos, sin).
    wgrad: forwarded to HamiltonianDynamics (gradient option of the dynamics wrapper).
    **kwargs: ignored here.
"""
super().__init__()
self.nfe = 0  # number of function evaluations; incremented in forward()
self.canonical = canonical
self.q_ndim = dof_ndim
self.angular_dims = angular_dims
# We parameterize angular dims in terms of cos(theta), sin(theta)
chs = [self.q_ndim + len(angular_dims)] + num_layers * [hidden_size]
# V(q): CosSin embedding -> num_layers tanh layers -> scalar potential per sample.
self.potential_net = nn.Sequential(
CosSin(self.q_ndim, angular_dims, only_q=True),
*[
FCtanh(chs[i], chs[i + 1], zero_bias=False, orthogonal_init=True)
for i in range(num_layers)
],
Linear(chs[-1], 1, zero_bias=False, orthogonal_init=True),
Reshape(-1)
)
print("HNN currently assumes potential energy depends only on q")
print("HNN currently assumes time independent Hamiltonian")
# Mass net: same trunk, but emits q_ndim*q_ndim entries reshaped to a matrix;
# tril_Minv() turns this raw output into a Cholesky factor of M^{-1}(q).
self.mass_net = nn.Sequential(
CosSin(self.q_ndim, angular_dims, only_q=True),
*[
FCtanh(chs[i], chs[i + 1], zero_bias=False, orthogonal_init=True)
for i in range(num_layers)
],
Linear(chs[-1], self.q_ndim * self.q_ndim, zero_bias=True, orthogonal_init=True),
Reshape(-1, self.q_ndim, self.q_ndim)
)
# Dynamics functional built from the learned Hamiltonian self.H.
self.dynamics = HamiltonianDynamics(self.H, wgrad=wgrad)
# NOTE(review): self-assignment below is a no-op — it looks like leftover
# from a removed weight-rescaling step; confirm before deleting.
for layer in get_linear_layers(self.potential_net):
layer.weight.data = layer.weight.data
def H(self, t, z):
    """Evaluate the Hamiltonian H(t, q, p) = T(q, p) + V(q).

    Args:
        t: scalar Tensor holding the current time (H itself is time independent)
        z: (N, 2 * q_ndim) Tensor of states, laid out as [q, p]

    Returns:
        Size-N Tensor of Hamiltonian values.
    """
    assert t.ndim == 0
    assert z.ndim == 2 and z.size(-1) == 2 * self.q_ndim
    q, p = z.chunk(2, dim=-1)
    potential = self.potential_net(q)
    kinetic = GeneralizedT(p, self.Minv(q))
    return kinetic + potential
def tril_Minv(self, q):
mass_net_q = self.mass_net(q)
res = torch.triu(mass_net_q, diagonal=1)
# Constrain diagonal of Cholesky to be positive
res = res + torch.diag_embed(
torch.nn.functional.softplus(torch.diagonal(mass_net_q, dim1=-2, dim2=-1)),
dim1=-2,
dim2=-1,
)
#print(torch.nn.functional.softplus(torch.diagonal(mass_net_q, dim1=-2, dim2=-1)).min())
res = res.transpose(-1, -2) # Make lower triangular
return res
def Minv(self, q: Tensor, eps=1e-4) -> Tensor:
"""Compute the learned inverse mass matrix M^{-1}(q)
Args:
q: bs x D Tensor representing the position
"""
assert q.ndim == 2
lower_triangular = self.tril_Minv(q)
assert lower_triangular.ndim == 3
diag_noise = eps * torch.eye(lower_triangular.size(-1), dtype=q.dtype, device=q.device)
Minv = lower_triangular.matmul(lower_triangular.transpose(-2, -1)) + diag_noise
# print(torch.symeig(Minv)[0].mean(0))
return Minv
def M(self, q, eps=1e-4):
    """Returns a function that multiplies the mass matrix M(q) by a vector qdot.

    Only M^{-1}(q) = L L^T + eps*I is parameterized directly, so applying
    M(q) is done implicitly by solving M^{-1}(q) x = qdot.

    Args:
        q: bs x D Tensor representing the position
        eps: diagonal jitter, matching the one used in Minv so that the
            implicit M(q) is the exact inverse of Minv(q).

    Returns:
        M_func: callable mapping a bs x D Tensor qdot to the bs x D
            Tensor M(q) @ qdot.
    """
    assert q.ndim == 2
    lower_triangular = self.tril_Minv(q)
    assert lower_triangular.ndim == 3

    def M_func(qdot):
        assert qdot.ndim == 2
        qdot = qdot.unsqueeze(-1)  # bs x D x 1 for the batched solve
        diag_noise = eps * torch.eye(
            lower_triangular.size(-1), dtype=qdot.dtype, device=qdot.device
        )
        Minv = lower_triangular @ lower_triangular.transpose(-2, -1) + diag_noise
        # torch.solve(B, A) was deprecated in PyTorch 1.9 and removed in
        # 1.13; use torch.linalg.solve(A, B) when available, falling back
        # to the legacy API on old installs.
        if hasattr(torch.linalg, "solve"):
            M_times_qdot = torch.linalg.solve(Minv, qdot).squeeze(-1)
        else:
            M_times_qdot = torch.solve(qdot, Minv).solution.squeeze(-1)
        return M_times_qdot

    return M_func
def forward(self, t, z):
    """Compute a batch of N x D time derivatives of the state z at time t.

    Args:
        t: scalar Tensor holding the current time.
        z: N x D Tensor of the N different states in D dimensions.

    Returns:
        N x D Tensor of time derivatives from the Hamiltonian dynamics.
    """
    assert t.ndim == 0
    assert z.ndim == 2
    derivative = self.dynamics(t, z)
    self.nfe += 1  # bookkeeping: function evaluations per integrate() call
    return derivative
def integrate(self, z0, ts, tol=1e-5, method="rk4"):
    """Integrate an initial state forward under the learned Hamiltonian dynamics.

    Args:
        z0: N x 2 x D Tensor of initial states (N = batch size). The middle
            axis holds [q, p] when self.canonical, otherwise [q, v].
        ts: length-T Tensor of time points to evaluate at.
        tol: relative tolerance handed to the ODE solver.
        method: solver name handed to odeint.

    Returns:
        N x T x 2 x D Tensor of states, in the same (canonical or
        velocity) coordinates as the input.
    """
    assert z0.ndim == 3 and ts.ndim == 1
    assert z0.shape[-1] == self.q_ndim
    bs, _, D = z0.size()
    assert D == self.q_ndim
    flat0 = z0.reshape(bs, -1)  # -> bs x 2D
    if self.canonical:
        q0, p0 = flat0.chunk(2, dim=-1)
    else:
        # Convert velocities to momenta: p = M(q) v.
        q0, v0 = flat0.chunk(2, dim=-1)
        p0 = self.M(q0)(v0)
    self.nfe = 0  # reset the evaluation counter for this trajectory
    start = torch.cat([q0, p0], dim=-1)
    trajectory = odeint(self, start, ts, rtol=tol, method=method)
    trajectory = trajectory.permute(1, 0, 2)  # T x N x 2D -> N x T x 2D
    if self.canonical:
        return trajectory.reshape(bs, len(ts), 2, D)
    # Map momenta back to velocities: v = M^{-1}(q) p.
    qt, pt = trajectory.reshape(-1, 2 * self.q_ndim).chunk(2, dim=-1)
    vt = (self.Minv(qt) @ pt.unsqueeze(-1)).squeeze(-1)
    return torch.cat([qt, vt], dim=-1).reshape(bs, len(ts), 2, D)
def acc_magn(self, qpt):
    """Mean squared magnitude of the momentum derivative over a trajectory.

    Evaluates the dynamics at t=0 on every state in qpt and averages the
    squared second-half (dp/dt) components.
    """
    flat = qpt.reshape(-1, qpt.shape[-1])
    derivatives = self.dynamics(torch.zeros(1)[0], flat)
    _, p_dot = derivatives.chunk(2, dim=-1)
    return p_dot.pow(2).mean()
def H_magn(self, qpt):
    """Mean squared Hamiltonian value over all states in a trajectory."""
    flat = qpt.reshape(-1, qpt.shape[-1])
    hamiltonian = self.H(torch.zeros(1)[0], flat)
    return hamiltonian.pow(2).mean()
@property
def param_groups(self):
    """Return an iterator over all model parameters as a single group."""
    # NOTE(review): presumably consumed when constructing an optimizer —
    # confirm against the training loop.
    return self.parameters()
# def log_data(self,logger,step,name):
# logger.add_scalars('info',
# {'acc_magn': self._acc_magn.cpu().data.numpy(),
# 'H_magn': self._H_magn.cpu().data.numpy()},
# step)
class NonseparableHNN(nn.Module):
    """Hamiltonian neural network with a jointly parameterized H(q, p).

    Unlike a separable HNN (H = T(q, p) + V(q)), the Hamiltonian here is a
    single network ``h_net`` over the full state z = [q, p]. A second
    network still parameterizes a Cholesky factor of the inverse mass
    matrix M^{-1}(q); it is used only to convert between velocities and
    momenta at the boundaries of ``integrate``.
    """

    def __init__(
        self,
        G,
        dof_ndim: int = 1,
        hidden_size: int = 256,
        num_layers: int = 2,
        canonical: bool = False,
        angular_dims: Tuple = tuple(),
        wgrad: bool = True,
        **kwargs
    ):
        """Build the Hamiltonian and mass networks.

        Args:
            G: system structure object (not used in this body; presumably
                kept for a uniform model-constructor interface — confirm).
            dof_ndim: number of degrees of freedom D.
            hidden_size: width of each hidden layer.
            num_layers: number of hidden FCtanh layers.
            canonical: if True, states are (q, p); otherwise (q, v).
            angular_dims: indices of angular coordinates, embedded as
                (cos, sin) pairs by CosSin.
            wgrad: forwarded to HamiltonianDynamics.
        """
        super().__init__(**kwargs)
        self.nfe = 0  # function-evaluation counter, reset per integrate()
        self.canonical = canonical
        self.q_ndim = dof_ndim
        self.angular_dims = angular_dims
        # We parameterize angular dims in terms of cos(theta), sin(theta)
        chs = [2 * self.q_ndim + len(angular_dims)] + num_layers * [hidden_size]
        self.h_net = nn.Sequential(
            CosSin(self.q_ndim, angular_dims, only_q=False),
            *[
                FCtanh(chs[i], chs[i + 1], zero_bias=False, orthogonal_init=True)
                for i in range(num_layers)
            ],
            Linear(chs[-1], 1, zero_bias=False, orthogonal_init=True),
            Reshape(-1)
        )
        # NOTE(review): the first message is inherited from the separable
        # HNN and does not describe this nonseparable variant — confirm.
        print("HNN currently assumes potential energy depends only on q")
        print("HNN currently assumes time independent Hamiltonian")
        # The mass net sees q only, hence the narrower input width.
        chs = [self.q_ndim + len(angular_dims)] + num_layers * [hidden_size]
        self.mass_net = nn.Sequential(
            CosSin(self.q_ndim, angular_dims, only_q=True),
            *[
                FCtanh(chs[i], chs[i + 1], zero_bias=False, orthogonal_init=True)
                for i in range(num_layers)
            ],
            Linear(chs[-1], self.q_ndim * self.q_ndim, zero_bias=True, orthogonal_init=True),
            Reshape(-1, self.q_ndim, self.q_ndim)
        )
        self.dynamics = HamiltonianDynamics(self.H, wgrad=wgrad)

    def H(self, t, z):
        """Compute the Hamiltonian H(t, q, p) directly from the full state.

        Args:
            t: Scalar Tensor representing time (unused: time-independent H).
            z: N x D Tensor of the N different states in D dimensions.
                Assumes that z is [q, p].
        Returns: Size N Hamiltonian Tensor
        """
        assert (t.ndim == 0) and (z.ndim == 2)
        assert z.size(-1) == 2 * self.q_ndim
        return self.h_net(z)

    def tril_Minv(self, q):
        """Lower-triangular Cholesky factor L(q) with M^{-1}(q) = L L^T."""
        mass_net_q = self.mass_net(q)
        res = torch.triu(mass_net_q, diagonal=1)
        # Constrain diagonal of Cholesky to be positive
        res = res + torch.diag_embed(
            torch.nn.functional.softplus(torch.diagonal(mass_net_q, dim1=-2, dim2=-1)),
            dim1=-2,
            dim2=-1,
        )
        res = res.transpose(-1, -2)  # Make lower triangular
        return res

    def Minv(self, q: Tensor, eps=1e-4) -> Tensor:
        """Compute the learned inverse mass matrix M^{-1}(q).

        Args:
            q: bs x D Tensor representing the position
            eps: diagonal jitter keeping the product positive definite
        """
        assert q.ndim == 2
        lower_triangular = self.tril_Minv(q)
        assert lower_triangular.ndim == 3
        diag_noise = eps * torch.eye(lower_triangular.size(-1), dtype=q.dtype, device=q.device)
        Minv = lower_triangular.matmul(lower_triangular.transpose(-2, -1)) + diag_noise
        return Minv

    def M(self, q, eps=1e-4):
        """Returns a function that multiplies the mass matrix M(q) by a vector qdot.

        M(q) is applied implicitly by solving M^{-1}(q) x = qdot.

        Args:
            q: bs x D Tensor representing the position
            eps: diagonal jitter matching Minv's regularization
        """
        assert q.ndim == 2
        lower_triangular = self.tril_Minv(q)
        assert lower_triangular.ndim == 3

        def M_func(qdot):
            assert qdot.ndim == 2
            qdot = qdot.unsqueeze(-1)  # bs x D x 1 for the batched solve
            diag_noise = eps * torch.eye(
                lower_triangular.size(-1), dtype=qdot.dtype, device=qdot.device
            )
            Minv = lower_triangular @ lower_triangular.transpose(-2, -1) + diag_noise
            # torch.solve(B, A) was deprecated in PyTorch 1.9 and removed
            # in 1.13; use torch.linalg.solve(A, B) when available.
            if hasattr(torch.linalg, "solve"):
                M_times_qdot = torch.linalg.solve(Minv, qdot).squeeze(-1)
            else:
                M_times_qdot = torch.solve(qdot, Minv).solution.squeeze(-1)
            return M_times_qdot

        return M_func

    def forward(self, t, z):
        """Computes a batch of `NxD` time derivatives of the state `z` at time `t`
        Args:
            t: Scalar Tensor of the current time
            z: N x D Tensor of the N different states in D dimensions
        Returns: N x D Tensor of the time derivatives
        """
        assert (t.ndim == 0) and (z.ndim == 2)
        dz_dt = self.dynamics(t, z)
        self.nfe += 1  # count solver function evaluations
        return dz_dt

    def integrate(self, z0, ts, tol=1e-5, method="rk4"):
        """Integrates an initial state forward in time according to the learned Hamiltonian dynamics
        Args:
            z0: (N x 2 x D) sized Tensor representing initial state;
                N is the batch size. Middle axis is [q, p] if
                self.canonical, else [q, v].
            ts: a length T Tensor representing the time points to evaluate at
            tol: integrator tolerance
            method: solver name forwarded to odeint
        Returns: a N x T x 2 x D sized Tensor
        """
        assert (z0.ndim == 3) and (ts.ndim == 1)
        assert z0.shape[-1] == self.q_ndim
        bs, _, D = z0.size()
        assert D == self.q_ndim
        z0 = z0.reshape(bs, -1)  # -> bs x 2D
        if self.canonical:
            q0, p0 = z0.chunk(2, dim=-1)
        else:
            # Convert initial velocities to momenta: p = M(q) v.
            q0, v0 = z0.chunk(2, dim=-1)
            p0 = self.M(q0)(v0)
        self.nfe = 0  # reset each forward pass
        qp0 = torch.cat([q0, p0], dim=-1)
        qpt = odeint(self, qp0, ts, rtol=tol, method=method)
        qpt = qpt.permute(1, 0, 2)  # T x N x D -> N x T x D
        if self.canonical:
            qpt = qpt.reshape(bs, len(ts), 2, D)
            return qpt
        else:
            # Map momenta back to velocities: v = M^{-1}(q) p.
            qt, pt = qpt.reshape(-1, 2 * self.q_ndim).chunk(2, dim=-1)
            vt = self.Minv(qt).matmul(pt.unsqueeze(-1)).squeeze(-1)
            qvt = torch.cat([qt, vt], dim=-1)
            qvt = qvt.reshape(bs, len(ts), 2, D)
            return qvt
| 36.842391
| 114
| 0.557678
| 1,879
| 13,558
| 3.916977
| 0.124002
| 0.02106
| 0.030571
| 0.01087
| 0.879212
| 0.857473
| 0.850951
| 0.844158
| 0.844158
| 0.828533
| 0
| 0.025454
| 0.32195
| 13,558
| 367
| 115
| 36.942779
| 0.775155
| 0.245243
| 0
| 0.770833
| 0
| 0
| 0.022364
| 0
| 0
| 0
| 0
| 0
| 0.091667
| 1
| 0.083333
| false
| 0
| 0.0375
| 0.004167
| 0.2125
| 0.016667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
466e79a5c7a2663c833bc769a3582417a5f37d13
| 49
|
py
|
Python
|
utils/__init__.py
|
zhanzheng8585/badnets-pytorch
|
673ff45940eff4cdc6db4f7d6a798cfa97f19de5
|
[
"MIT"
] | 59
|
2020-08-08T08:25:49.000Z
|
2022-03-30T03:48:46.000Z
|
utils/__init__.py
|
zhanzheng8585/badnets-pytorch
|
673ff45940eff4cdc6db4f7d6a798cfa97f19de5
|
[
"MIT"
] | 1
|
2021-12-30T09:25:37.000Z
|
2021-12-30T09:25:37.000Z
|
utils/__init__.py
|
zhanzheng8585/badnets-pytorch
|
673ff45940eff4cdc6db4f7d6a798cfa97f19de5
|
[
"MIT"
] | 11
|
2020-08-13T10:56:32.000Z
|
2022-02-21T09:00:40.000Z
|
from .utils import array2img, print_model_perform
| 49
| 49
| 0.877551
| 7
| 49
| 5.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.081633
| 49
| 1
| 49
| 49
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
d3f354621d772b13d3ebe617bb7d641fe6ea0efd
| 230
|
py
|
Python
|
pyrfuniverse/side_channel/__init__.py
|
happyCoderJDFJJ/pyrfuniverse
|
8ddb6e0d8f113015ba820a327388a528a8b215c7
|
[
"Apache-2.0"
] | null | null | null |
pyrfuniverse/side_channel/__init__.py
|
happyCoderJDFJJ/pyrfuniverse
|
8ddb6e0d8f113015ba820a327388a528a8b215c7
|
[
"Apache-2.0"
] | null | null | null |
pyrfuniverse/side_channel/__init__.py
|
happyCoderJDFJJ/pyrfuniverse
|
8ddb6e0d8f113015ba820a327388a528a8b215c7
|
[
"Apache-2.0"
] | null | null | null |
from pyrfuniverse.side_channel.incoming_message import IncomingMessage # noqa
from pyrfuniverse.side_channel.outgoing_message import OutgoingMessage # noqa
from pyrfuniverse.side_channel.side_channel import SideChannel # noqa
| 46
| 78
| 0.865217
| 27
| 230
| 7.148148
| 0.444444
| 0.227979
| 0.310881
| 0.419689
| 0.321244
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095652
| 230
| 4
| 79
| 57.5
| 0.927885
| 0.06087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
31105d3c5c97877938707e4032985407f269e5d9
| 109
|
py
|
Python
|
thenumbers/random.py
|
extsoft/numbers-api
|
3484509ae60c02c04cb03c4f42ac99a3c6ae4a3c
|
[
"MIT"
] | 2
|
2019-12-17T11:34:43.000Z
|
2020-02-22T14:15:44.000Z
|
thenumbers/random.py
|
extsoft/numbers-api
|
3484509ae60c02c04cb03c4f42ac99a3c6ae4a3c
|
[
"MIT"
] | 2
|
2020-03-18T16:46:40.000Z
|
2021-03-23T10:32:25.000Z
|
thenumbers/random.py
|
extsoft/numbers-api
|
3484509ae60c02c04cb03c4f42ac99a3c6ae4a3c
|
[
"MIT"
] | 1
|
2022-01-02T12:51:49.000Z
|
2022-01-02T12:51:49.000Z
|
import random
import sys
def number() -> str:
    """Return a uniformly random non-negative integer rendered as a string."""
    value = random.randint(0, sys.maxsize)  # noqa: S311
    return str(value)
| 15.571429
| 60
| 0.678899
| 16
| 109
| 4.625
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0.192661
| 109
| 6
| 61
| 18.166667
| 0.795455
| 0.091743
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
312bca43cb654b82c2d3d00214a9db8c037769b7
| 12,984
|
py
|
Python
|
src/test/test_address.py
|
semo4/fast_api_test
|
21f34ff6a6f29597b910a3c9e64c222708915c33
|
[
"MIT"
] | null | null | null |
src/test/test_address.py
|
semo4/fast_api_test
|
21f34ff6a6f29597b910a3c9e64c222708915c33
|
[
"MIT"
] | null | null | null |
src/test/test_address.py
|
semo4/fast_api_test
|
21f34ff6a6f29597b910a3c9e64c222708915c33
|
[
"MIT"
] | null | null | null |
from pytest_mock import MockFixture
# Shared Authorization header sent with every request in this module.
# NOTE(review): the bearer token is hard-coded and (per its exp claim)
# appears long expired; these tests presumably pass because the auth
# dependency is satisfied/mocked elsewhere — confirm.
headers_ = {
    'Authorization':
    'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJvc0Bob3RtYWlsLmNvbSIsImV4cCI6MTYyNzU2NjE1Mn0.IRKCGqK_-5DOtKI2ozNTZqR7gV-B-pOw7YFoCNJD0EA'
}
def test_get_all_addresses(client, mocker: MockFixture):
    """GET /address/ responds 404 when the select yields no rows."""
    mock_address = mocker.patch(
        "controllers.address_controllers.address.select")
    mock_address.return_value.execute.return_value = []
    response = client.get('/address/', headers=headers_)
    assert response.status_code == 404
def test_get_addresses(client, mocker: MockFixture):
    """GET /address/ responds 200 when the select yields at least one row."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.select')
    mock_address.return_value.execute.return_value = [{
        "id":
        "3fa85f64-5717-4562-b3fc-2c963f66afa6",
        "name":
        "string",
        "zip_code":
        "string",
        "building_number":
        0,
        "street_name":
        "string",
        "created_at":
        "2021-07-29T13:19:28.615Z",
        "updated_at":
        "2021-07-29T13:19:28.615Z"
    }]
    response = client.get('/address/', headers=headers_)
    assert response.status_code == 200
# def test_unauthorized(client):
# response = client.get('/address/')
# assert response.status_code == 401
# assert response.json()['message'] == 'Not UNAUTHORIZED'
def test_get_empty_address_by_id(client, mocker: MockFixture):
    """GET /address/<id> responds 404 when the row lookup comes back empty."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.select')
    mock_address.return_value.where.return_value.execute.return_value.first.return_value = {}
    response = client.get('/address/29309b48-fff3-4b77-b07b-c4006e899f78',
                          headers=headers_)
    assert response.status_code == 404
def test_get_address_by_id(client, mocker: MockFixture):
    """GET /address/<id> responds 200 when the row lookup finds a record."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.select')
    mock_address.return_value.where.return_value.execute.return_value.first.return_value = {
        "id": "29309b48-fff3-4b77-b07b-c4006e899fc1",
        "name": "Amman55",
        "zip_code": "11111",
        "building_number": 5,
        "street_name": "mainstretamman",
        "created_at": "2021-07-21T18:35:57.928243",
        "updated_at": "2021-07-21T18:35:57.928250"
    }
    response = client.get('/address/29309b48-fff3-4b77-b07b-c4006e899fc1',
                          headers=headers_)
    assert response.status_code == 200
def test_post(client, mocker: MockFixture):
    """POST /address/ with a valid payload responds 201 Created."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.insert')
    mock_address.return_value.values.return_value.returning.return_value.\
        execute.return_value.first.return_value = {
        "id": "5796d483-8a28-48b0-b288-fda6454041b5",
        "name": "Irbid",
        "zip_code": "25252",
        "building_number": 7,
        "street_name": "mainstreet",
        "created_at": "2021-07-25T07:43:03.015999",
        "updated_at": "2021-07-25T07:43:03.016005"
    }
    result = {
        "name": "Irbid",
        "zip_code": "25252",
        "building_number": 7,
        "street_name": "mainstreet",
    }
    response = client.post('/address/', headers=headers_, json=result)
    assert response.status_code == 201
def test_post_missing_value_address(client, mocker: MockFixture):
    """POST /address/ missing required fields fails validation with 422."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.insert')
    mock_address.return_value.values.return_value.returning.return_value.\
        execute.return_value.first.return_value = {
        "name": "Irbid",
        "zip_code": "25252",
        "building_number": 7,
        "street_name": "mainstreet"
    }
    # Payload deliberately omits "name" and "street_name".
    result = {"zip_code": "25252", "building_number": 7}
    response = client.post('/address/', headers=headers_, json=result)
    assert response.status_code == 422
def test_delete_not_exist(client, mocker: MockFixture):
    """DELETE /address/<id> responds 404 when the row is not found."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.select')
    mock_address.return_value.where.return_value.\
        execute.return_value.first.return_value = {}
    response = client.delete('/address/29309b48-fff3-4b77-b07b-c4006e899fc1',
                             headers=headers_)
    assert response.status_code == 404
def test_delete_address(client, mocker: MockFixture):
    """DELETE /address/<id> responds 204 when the row exists and is removed."""
    # First patch: the existence check (select) finds the record.
    mock_address = mocker.patch(
        'controllers.address_controllers.address.select')
    mock_address.return_value.where.return_value.\
        execute.return_value.first.return_value = {
        "id": "29309b48-fff3-4b77-b07b-c4006e899fc1",
        "name": "Amman55",
        "zip_code": "11111",
        "building_number": 5,
        "street_name": "mainstretamman",
        "created_at": "2021-07-21T18:35:57.928243",
        "updated_at": "2021-07-21T18:35:57.928250"
    }
    # Second patch: the delete statement returns the removed record.
    mock_address = mocker.patch(
        'controllers.address_controllers.address.delete')
    mock_address.return_value.where.return_value.returning.return_value.\
        execute.return_value.first.return_value = {
        "id": "29309b48-fff3-4b77-b07b-c4006e899fc1",
        "name": "Amman55",
        "zip_code": "11111",
        "building_number": 5,
        "street_name": "mainstretamman",
        "created_at": "2021-07-21T18:35:57.928243",
        "updated_at": "2021-07-21T18:35:57.928250"
    }
    response = client.delete('/address/29309b48-fff3-4b77-b07b-c4006e899fc1',
                             headers=headers_)
    assert response.status_code == 204
def test_put(client, mocker: MockFixture):
    """PUT /address/<id> responds 201.

    Three patches mirror the endpoint's apparent flow — select (existence
    check), delete, then insert of the replacement record.
    """
    mock_address = mocker.patch(
        'controllers.address_controllers.address.select')
    mock_address.return_value.where.return_value.execute.return_value.first.return_value = {
        "id": "29309b48-fff3-4b77-b07b-c4006e899fc1",
        "name": "Amman",
        "zip_code": "11111",
        "building_number": 5,
        "street_name": "mainstretamman",
        "created_at": "2021-07-21T18:35:57.928243",
        "updated_at": "2021-07-21T18:35:57.928250"
    }
    mock_address = mocker.patch(
        'controllers.address_controllers.address.delete')
    mock_address.return_value.where.return_value.execute.return_value.first.return_value = {
        "id": "29309b48-fff3-4b77-b07b-c4006e899fc1",
        "name": "Amman",
        "zip_code": "11111",
        "building_number": 5,
        "street_name": "mainstretamman",
        "created_at": "2021-07-21T18:35:57.928243",
        "updated_at": "2021-07-21T18:35:57.928250"
    }
    mock_address = mocker.patch(
        'controllers.address_controllers.address.insert')
    mock_address.return_value.values.return_value.returning.return_value.\
        execute.return_value.first.return_value = {
        "id": "29309b48-fff3-4b77-b07b-c4006e899fc1",
        "name": "Petra1",
        "zip_code": "55558",
        "building_number": 7,
        "street_name": "mainstreet",
        "created_at": "2021-07-21T18:35:57.928243",
        "updated_at": "2021-07-21T18:35:57.928250"
    }
    result = {
        "name": "Petra1",
        "zip_code": "55558",
        "building_number": 7,
        "street_name": "mainstreet",
    }
    response = client.put('/address/29309b48-fff3-4b77-b07b-c4006e899fc1',
                          headers=headers_,
                          json=result)
    assert response.status_code == 201
def test_put_not_exist(client, mocker: MockFixture):
    """PUT /address/<id> responds 404 when the target row does not exist."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.select')
    mock_address.return_value.where.return_value.execute.return_value.first.return_value = {}
    result = {
        "name": "Petra1",
        "zip_code": "55558",
        "building_number": 7,
        "street_name": "mainstreet",
    }
    response = client.put('/address/29309b48-fff3-4b77-b07b-c4006e899f45',
                          headers=headers_, json=result)
    assert response.status_code == 404
def test_violate_put(client, mocker: MockFixture):
    """PUT with an invalid payload (2-char zip_code) fails validation with 422."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.update')
    mock_address.return_value.where.return_value.values.return_value.returning.return_value.\
        execute.return_value.first.return_value = {
        "id": "5796d483-8a28-48b0-b288-fda6454041b5",
        "name": "Petra",
        "zip_code": "25252",
        "building_number": 7,
        "street_name": "mainstreet",
        "created_at": "2021-07-25T07:43:03.015999",
        "updated_at": "2021-07-25T07:43:03.016005"
    }
    result = {
        "name": "Petra",
        "zip_code": "25",
        "building_number": 7,
        "street_name": "mainstreet",
    }
    response = client.put('/address/5796d483-8a28-48b0-b288-fda6454041b5',
                          headers=headers_,
                          json=result)
    assert response.status_code == 422
def test_patch(client, mocker: MockFixture):
    """PATCH /address/<id> responds 201; select then update are mocked."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.select')
    mock_address.return_value.where.return_value.execute.return_value.first.return_value = {
        "id": "29309b48-fff3-4b77-b07b-c4006e899fc1",
        "name": "Amman",
        "zip_code": "11111",
        "building_number": 5,
        "street_name": "mainstretamman",
        "created_at": "2021-07-21T18:35:57.928243",
        "updated_at": "2021-07-21T18:35:57.928250"
    }
    mock_address = mocker.patch(
        'controllers.address_controllers.address.update')
    mock_address.return_value.where.return_value.values.return_value.returning.return_value.\
        execute.return_value.first.return_value = {
        "id": "5796d483-8a28-48b0-b288-fda6454041b5",
        "name": "Petra",
        "zip_code": "25252",
        "building_number": 7,
        "street_name": "mainstreet",
        "created_at": "2021-07-25T07:43:03.015999",
        "updated_at": "2021-07-25T07:43:03.016005"
    }
    result = {
        "name": "Petra",
        "zip_code": "25252",
        "building_number": 7,
        "street_name": "mainstreet",
    }
    response = client.patch('/address/5796d483-8a28-48b0-b288-fda6454041b5',
                            headers=headers_,
                            json=result)
    assert response.status_code == 201
def test_patch_not_exist(client, mocker: MockFixture):
    """PATCH /address/<id> responds 404 when the target row does not exist."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.select')
    mock_address.return_value.where.return_value.execute.return_value.first.return_value = {}
    result = {
        "name": "teststring",
        "zip_code": "12563",
        "building_number": 0,
        "street_name": "teststring",
    }
    response = client.patch('/address/3fa85f64-5717-4562-b3fc-2c963f66afa6',
                            headers=headers_,
                            json=result)
    assert response.status_code == 404
def test_violate_patch(client, mocker: MockFixture):
    """PATCH with an invalid payload (2-char zip_code) fails validation with 422."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.update')
    mock_address.return_value.where.return_value.values.return_value.returning.return_value.\
        execute.return_value.first.return_value = {
        "id": "5796d483-8a28-48b0-b288-fda6454041b5",
        "name": "Petra",
        "zip_code": "25252",
        "building_number": 7,
        "street_name": "mainstreet",
        "created_at": "2021-07-25T07:43:03.015999",
        "updated_at": "2021-07-25T07:43:03.016005"
    }
    result = {
        "name": "Petra",
        "zip_code": "25",
        "building_number": 7,
        "street_name": "mainstreet",
    }
    response = client.patch('/address/5796d483-8a28-48b0-b288-fda6454041b5',
                            headers=headers_,
                            json=result)
    assert response.status_code == 422
def test_patch_404(client, mocker: MockFixture):
    """PATCH responds 404 when the update yields no row despite select finding one."""
    mock_address = mocker.patch(
        'controllers.address_controllers.address.select')
    mock_address.return_value.where.return_value.execute.return_value.first.return_value = {
        "id": "5796d483-8a28-48b0-b288-fda6454041b5",
        "name": "Petra",
        "zip_code": "25252",
        "building_number": 7,
        "street_name": "mainstreet",
        "created_at": "2021-07-25T07:43:03.015999",
        "updated_at": "2021-07-25T07:43:03.016005"
    }
    # The update statement itself returns nothing -> endpoint reports 404.
    mock_address = mocker.patch(
        'controllers.address_controllers.address.update')
    mock_address.return_value.where.return_value.values.return_value.returning.return_value.\
        execute.return_value.first.return_value = {}
    result = {
        "name": "Petra",
        "zip_code": "25458",
        "building_number": 7,
        "street_name": "mainstreet",
    }
    response = client.patch('/address/5796d483-8a28-48b0-b288-fda6454041b5',
                            headers=headers_,
                            json=result)
    assert response.status_code == 404
| 37.744186
| 149
| 0.62577
| 1,391
| 12,984
| 5.614666
| 0.084112
| 0.123944
| 0.026633
| 0.056338
| 0.934059
| 0.917926
| 0.914469
| 0.909091
| 0.903329
| 0.899104
| 0
| 0.127175
| 0.242991
| 12,984
| 343
| 150
| 37.854227
| 0.667413
| 0.012939
| 0
| 0.761589
| 0
| 0
| 0.340879
| 0.210054
| 0
| 0
| 0
| 0
| 0.049669
| 1
| 0.049669
| false
| 0
| 0.003311
| 0
| 0.05298
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
319559f6dfea0b98418c04f59d078296b34ddad0
| 8,836
|
py
|
Python
|
src/util/ukp_arg_conv_folds.py
|
UKPLab/acl2022-structure-batches
|
d7e116c1254ad00d8b59da3116043424a30f6f64
|
[
"Apache-2.0"
] | null | null | null |
src/util/ukp_arg_conv_folds.py
|
UKPLab/acl2022-structure-batches
|
d7e116c1254ad00d8b59da3116043424a30f6f64
|
[
"Apache-2.0"
] | null | null | null |
src/util/ukp_arg_conv_folds.py
|
UKPLab/acl2022-structure-batches
|
d7e116c1254ad00d8b59da3116043424a30f6f64
|
[
"Apache-2.0"
] | null | null | null |
# Fixed cross-validation folds for the UKP argument-convincingness corpus.
# Each outer entry is one fold given as three topic-ID lists (19/5/8
# topics — presumably [train, dev, test]; confirm against the consumer).
# Topic IDs are "<debate-slug>_<stance-slug>" strings; splits are
# topic-disjoint within each fold.
ukp_argument_folds = [[
    [
        'william-farquhar-ought-to-be-honoured-as-the-rightful-founder-of-singapore_yes-of-course-',
        'india-has-the-potential-to-lead-the-world-_yes-for',
        'if-your-spouse-committed-murder-and-he-or-she-confided-in-you-would-you-turn-them-in-_yes',
        'evolution-vs-creation_evolution',
        'is-it-better-to-have-a-lousy-father-or-to-be-fatherless-_fatherless',
        'human-growth-and-development-should-parents-use-spanking-as-an-option-to-discipline-_no',
        'gay-marriage-right-or-wrong_allowing-gay-marriage-is-right',
        'tv-is-better-than-books_tv',
        'christianity-or-atheism-_christianity',
        'is-the-school-uniform-a-good-or-bad-idea-_good',
        'ban-plastic-water-bottles_yes-emergencies-only',
        'should-physical-education-be-mandatory-in-schools-_no-',
        'evolution-vs-creation_creation',
        'firefox-vs-internet-explorer_it-has-a-cute-logo-oh-and-extensions-err-add-ons',
        'personal-pursuit-or-advancing-the-common-good-_advancing-the-commond-good',
        'gay-marriage-right-or-wrong_allowing-gay-marriage-is-wrong',
        'human-growth-and-development-should-parents-use-spanking-as-an-option-to-discipline-_yes',
        'pro-choice-vs-pro-life_pro-life',
        'is-porn-wrong-_yes-porn-is-wrong',
    ], [
        'william-farquhar-ought-to-be-honoured-as-the-rightful-founder-of-singapore_no-it-is-raffles-',
        'if-your-spouse-committed-murder-and-he-or-she-confided-in-you-would-you-turn-them-in-_no',
        'firefox-vs-internet-explorer_there-s-more-browsers-than-the-ie-firefox-is-an-animal',
        'pro-choice-vs-pro-life_pro-choice',
        'ban-plastic-water-bottles_no-bad-for-the-economy',
    ],
    [
        'tv-is-better-than-books_books',
        'is-porn-wrong-_no-is-is-not',
        'should-physical-education-be-mandatory-in-schools-_yes-',
        'is-it-better-to-have-a-lousy-father-or-to-be-fatherless-_lousy-father',
        'christianity-or-atheism-_atheism',
        'personal-pursuit-or-advancing-the-common-good-_personal-pursuit',
        'is-the-school-uniform-a-good-or-bad-idea-_bad',
        'india-has-the-potential-to-lead-the-world-_no-against'
    ]],
    # Fold 2
    [[
        'india-has-the-potential-to-lead-the-world-_yes-for',
        'if-your-spouse-committed-murder-and-he-or-she-confided-in-you-would-you-turn-them-in-_yes',
        'evolution-vs-creation_evolution',
        'william-farquhar-ought-to-be-honoured-as-the-rightful-founder-of-singapore_no-it-is-raffles-',
        'human-growth-and-development-should-parents-use-spanking-as-an-option-to-discipline-_no',
        'ban-plastic-water-bottles_no-bad-for-the-economy',
        'christianity-or-atheism-_christianity',
        'gay-marriage-right-or-wrong_allowing-gay-marriage-is-wrong',
        'tv-is-better-than-books_tv',
        'ban-plastic-water-bottles_yes-emergencies-only',
        'is-porn-wrong-_no-is-is-not',
        'personal-pursuit-or-advancing-the-common-good-_personal-pursuit',
        'should-physical-education-be-mandatory-in-schools-_yes-',
        'is-it-better-to-have-a-lousy-father-or-to-be-fatherless-_lousy-father',
        'christianity-or-atheism-_atheism',
        'is-porn-wrong-_yes-porn-is-wrong',
        'is-the-school-uniform-a-good-or-bad-idea-_bad',
        'pro-choice-vs-pro-life_pro-choice',
        'firefox-vs-internet-explorer_it-has-a-cute-logo-oh-and-extensions-err-add-ons',
    ], [
        'should-physical-education-be-mandatory-in-schools-_no-',
        'william-farquhar-ought-to-be-honoured-as-the-rightful-founder-of-singapore_yes-of-course-',
        'evolution-vs-creation_creation',
        'gay-marriage-right-or-wrong_allowing-gay-marriage-is-right',
        'personal-pursuit-or-advancing-the-common-good-_advancing-the-commond-good',
    ], [
        'human-growth-and-development-should-parents-use-spanking-as-an-option-to-discipline-_yes',
        'pro-choice-vs-pro-life_pro-life',
        'tv-is-better-than-books_books',
        'is-the-school-uniform-a-good-or-bad-idea-_good',
        'is-it-better-to-have-a-lousy-father-or-to-be-fatherless-_fatherless',
        'if-your-spouse-committed-murder-and-he-or-she-confided-in-you-would-you-turn-them-in-_no',
        'firefox-vs-internet-explorer_there-s-more-browsers-than-the-ie-firefox-is-an-animal',
        'india-has-the-potential-to-lead-the-world-_no-against'
    ]],
    # Fold 3
    [[
        'human-growth-and-development-should-parents-use-spanking-as-an-option-to-discipline-_yes',
        'pro-choice-vs-pro-life_pro-life',
        'tv-is-better-than-books_books',
        'is-it-better-to-have-a-lousy-father-or-to-be-fatherless-_fatherless',
        'if-your-spouse-committed-murder-and-he-or-she-confided-in-you-would-you-turn-them-in-_no',
        'firefox-vs-internet-explorer_there-s-more-browsers-than-the-ie-firefox-is-an-animal',
        'pro-choice-vs-pro-life_pro-choice',
        'gay-marriage-right-or-wrong_allowing-gay-marriage-is-right',
        'is-porn-wrong-_no-is-is-not',
        'should-physical-education-be-mandatory-in-schools-_yes-',
        'is-it-better-to-have-a-lousy-father-or-to-be-fatherless-_lousy-father',
        'christianity-or-atheism-_atheism',
        'is-porn-wrong-_yes-porn-is-wrong',
        'personal-pursuit-or-advancing-the-common-good-_personal-pursuit',
        'is-the-school-uniform-a-good-or-bad-idea-_bad',
        'william-farquhar-ought-to-be-honoured-as-the-rightful-founder-of-singapore_yes-of-course-',
        'india-has-the-potential-to-lead-the-world-_yes-for',
        'ban-plastic-water-bottles_no-bad-for-the-economy',
        'evolution-vs-creation_evolution',
    ], [
        'india-has-the-potential-to-lead-the-world-_no-against',
        'william-farquhar-ought-to-be-honoured-as-the-rightful-founder-of-singapore_no-it-is-raffles-',
        'human-growth-and-development-should-parents-use-spanking-as-an-option-to-discipline-_no',
        'if-your-spouse-committed-murder-and-he-or-she-confided-in-you-would-you-turn-them-in-_yes',
        'is-the-school-uniform-a-good-or-bad-idea-_good',
    ], [
        'christianity-or-atheism-_christianity',
        'tv-is-better-than-books_tv',
        'ban-plastic-water-bottles_yes-emergencies-only',
        'should-physical-education-be-mandatory-in-schools-_no-',
        'evolution-vs-creation_creation',
        'firefox-vs-internet-explorer_it-has-a-cute-logo-oh-and-extensions-err-add-ons',
        'personal-pursuit-or-advancing-the-common-good-_advancing-the-commond-good',
        'gay-marriage-right-or-wrong_allowing-gay-marriage-is-wrong',
    ]],
    # Fold 4
    [[
        'christianity-or-atheism-_christianity',
        'tv-is-better-than-books_tv',
        'ban-plastic-water-bottles_yes-emergencies-only',
        'should-physical-education-be-mandatory-in-schools-_no-',
        'evolution-vs-creation_creation',
        'firefox-vs-internet-explorer_it-has-a-cute-logo-oh-and-extensions-err-add-ons',
        'personal-pursuit-or-advancing-the-common-good-_advancing-the-commond-good',
        'gay-marriage-right-or-wrong_allowing-gay-marriage-is-wrong',
        'human-growth-and-development-should-parents-use-spanking-as-an-option-to-discipline-_yes',
        'pro-choice-vs-pro-life_pro-life',
        'tv-is-better-than-books_books',
        'is-the-school-uniform-a-good-or-bad-idea-_good',
        'is-it-better-to-have-a-lousy-father-or-to-be-fatherless-_fatherless',
        'if-your-spouse-committed-murder-and-he-or-she-confided-in-you-would-you-turn-them-in-_no',
        'pro-choice-vs-pro-life_pro-choice',
        'is-porn-wrong-_no-is-is-not',
        'william-farquhar-ought-to-be-honoured-as-the-rightful-founder-of-singapore_no-it-is-raffles-',
        'should-physical-education-be-mandatory-in-schools-_yes-',
        'is-it-better-to-have-a-lousy-father-or-to-be-fatherless-_lousy-father',
    ], [
        'christianity-or-atheism-_atheism',
        'is-porn-wrong-_yes-porn-is-wrong',
        'personal-pursuit-or-advancing-the-common-good-_personal-pursuit',
        'is-the-school-uniform-a-good-or-bad-idea-_bad',
        'india-has-the-potential-to-lead-the-world-_no-against'
    ], [
        'william-farquhar-ought-to-be-honoured-as-the-rightful-founder-of-singapore_yes-of-course-',
        'india-has-the-potential-to-lead-the-world-_yes-for',
        'if-your-spouse-committed-murder-and-he-or-she-confided-in-you-would-you-turn-them-in-_yes',
        'firefox-vs-internet-explorer_there-s-more-browsers-than-the-ie-firefox-is-an-animal',
        'evolution-vs-creation_evolution',
        'human-growth-and-development-should-parents-use-spanking-as-an-option-to-discipline-_no',
        'gay-marriage-right-or-wrong_allowing-gay-marriage-is-right',
        'ban-plastic-water-bottles_no-bad-for-the-economy',
    ]]]
| 60.108844
| 103
| 0.684699
| 1,283
| 8,836
| 4.614186
| 0.088854
| 0.010811
| 0.027027
| 0.02973
| 0.972128
| 0.972128
| 0.968581
| 0.944426
| 0.936824
| 0.900676
| 0
| 0
| 0.141127
| 8,836
| 146
| 104
| 60.520548
| 0.780076
| 0
| 0
| 0.958904
| 0
| 0.30137
| 0.808511
| 0.808511
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
319a889daa3549c1c6a46fe69bb67958fee539da
| 7,903
|
py
|
Python
|
Server-Test-PDP-Exd/4.Mix/run.py
|
Parwatsingh/OptSmart
|
0564abdd04e7bc37a3586982a1d7ca5a97be88d5
|
[
"Apache-2.0"
] | 1
|
2021-03-13T08:55:13.000Z
|
2021-03-13T08:55:13.000Z
|
Server-Test-PDP-Exd/4.Mix/run.py
|
Parwatsingh/OptSmart
|
0564abdd04e7bc37a3586982a1d7ca5a97be88d5
|
[
"Apache-2.0"
] | null | null | null |
Server-Test-PDP-Exd/4.Mix/run.py
|
Parwatsingh/OptSmart
|
0564abdd04e7bc37a3586982a1d7ca5a97be88d5
|
[
"Apache-2.0"
] | null | null | null |
import subprocess

# Separator printed after every benchmark suite (kept byte-identical to the
# original script's output).
SEPARATOR = "\n--------------------------------------------------------------\n"

# Directories containing the per-variant driver scripts; each one is run,
# in this order, for every generated workload.
VARIANTS = ["Serial", "MVTO", "BTO-STM", "Spec-Bin", "Static-Bin"]


def run_suite(gen_args):
    """Generate one workload with ./genAUs *gen_args* (a list of string
    arguments), run every variant's run.py against it, then print the
    separator line."""
    subprocess.call(["./genAUs"] + list(gen_args))
    for variant in VARIANTS:
        subprocess.call(["python", "run.py"], cwd=variant)
    print(SEPARATOR)


# Compile the workload generator once up front.
GenAUs = ["g++", "-std=c++17", "GenAUs.cpp", "-o", "genAUs", "-O3", "-g"]
subprocess.call(GenAUs)

print("\n\n\n---------------- Workload 1 ----------------\n")
################ Warmup-Run ##################
run_suite(["50", "900", "334", "64", "268", "334", "6000", "0"])
# Workload 1: vary the second generator argument, all else fixed.
for second in ("1000", "2000", "3000", "4000", "5000", "6000"):
    run_suite(["50", second, "334", "64", "268", "334", "6000", "0"])

############################ Workload 2 ####################################
print("\n\n\n---------------- Workload 2 ----------------\n")
# Workload 2: vary the first generator argument, all else fixed.
for first in ("10", "20", "30", "40", "60"):
    run_suite([first, "3000", "334", "64", "268", "334", "6000", "0"])

############################ Workload 3 ####################################
# Workload 3: scale the middle four arguments together.  NOTE(review): these
# tuples are copied verbatim from the original runs -- they are NOT clean
# multiples of the base values (e.g. 784 != 268*3), so they stay explicit.
for a, b, c, d in (("668", "128", "536", "668"),
                   ("1002", "192", "784", "1002"),
                   ("1336", "256", "1052", "1336"),
                   ("1670", "320", "1340", "1670"),
                   ("2004", "384", "1608", "2004")):
    run_suite(["50", "3000", a, b, c, d, "6000", "0"])
| 43.905556
| 76
| 0.50677
| 954
| 7,903
| 4.198113
| 0.06499
| 0.36005
| 0.424469
| 0.48814
| 0.944569
| 0.936579
| 0.936579
| 0.936579
| 0.936579
| 0.936579
| 0
| 0.053922
| 0.070733
| 7,903
| 179
| 77
| 44.150838
| 0.491422
| 0.004302
| 0
| 0.843972
| 0
| 0
| 0.441531
| 0.15133
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007092
| 0
| 0.007092
| 0.134752
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
31a579fc4cbf44c2d990a7b06b935f9f15a0b3fd
| 5,859
|
py
|
Python
|
migrations/versions/419108de3b3d_.py
|
AlexKouzy/ethnicity-facts-and-figures-publisher
|
18ab2495a8633f585e18e607c7f75daa564a053d
|
[
"MIT"
] | null | null | null |
migrations/versions/419108de3b3d_.py
|
AlexKouzy/ethnicity-facts-and-figures-publisher
|
18ab2495a8633f585e18e607c7f75daa564a053d
|
[
"MIT"
] | null | null | null |
migrations/versions/419108de3b3d_.py
|
AlexKouzy/ethnicity-facts-and-figures-publisher
|
18ab2495a8633f585e18e607c7f75daa564a053d
|
[
"MIT"
] | null | null | null |
"""empty message
Revision ID: 419108de3b3d
Revises: 5382560efddb
Create Date: 2017-08-24 14:11:13.151318
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '419108de3b3d'
down_revision = '5382560efddb'
branch_labels = None
depends_on = None
def upgrade():
    """Add the secondary_source_{1,2}_* nullable TEXT columns to db_page.

    The 32 original hand-written op.add_column calls were two identical
    alphabetical runs of 16 field names; the loop below preserves that
    exact column order (source 1 first, then source 2).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    fields = (
        'contact_1_email', 'contact_1_name', 'contact_1_phone',
        'contact_2_email', 'contact_2_name', 'contact_2_phone',
        'date', 'date_next_update', 'date_updated',
        'disclosure_control', 'frequency', 'publisher',
        'statistic_type', 'suppression_rules', 'title', 'url',
    )
    for source in (1, 2):
        for field in fields:
            op.add_column(
                'db_page',
                sa.Column('secondary_source_{}_{}'.format(source, field),
                          sa.TEXT(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Drop the secondary_source_{1,2}_* columns from db_page.

    Columns are dropped in exactly the reverse of the order upgrade()
    added them (source 2 first, each field list reversed), matching the
    original hand-written migration.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    fields = (
        'contact_1_email', 'contact_1_name', 'contact_1_phone',
        'contact_2_email', 'contact_2_name', 'contact_2_phone',
        'date', 'date_next_update', 'date_updated',
        'disclosure_control', 'frequency', 'publisher',
        'statistic_type', 'suppression_rules', 'title', 'url',
    )
    for source in (2, 1):
        for field in reversed(fields):
            op.drop_column(
                'db_page',
                'secondary_source_{}_{}'.format(source, field))
    # ### end Alembic commands ###
| 64.384615
| 106
| 0.759174
| 904
| 5,859
| 4.483407
| 0.077434
| 0.126326
| 0.189489
| 0.10264
| 0.929682
| 0.92376
| 0.922527
| 0.922527
| 0.812485
| 0.706144
| 0
| 0.025984
| 0.093531
| 5,859
| 90
| 107
| 65.1
| 0.737149
| 0.05035
| 0
| 0
| 0
| 0
| 0.446535
| 0.361136
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027778
| false
| 0
| 0.027778
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31a9813af21e42a543dd89957438d8954fe3ca84
| 1,777
|
py
|
Python
|
python3/lib/python3.6/site-packages/tensorflow/_api/v1/compat/v2/strings/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | 3
|
2020-10-12T15:47:01.000Z
|
2022-01-14T19:51:26.000Z
|
python3/lib/python3.6/site-packages/tensorflow/_api/v1/compat/v2/strings/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | null | null | null |
python3/lib/python3.6/site-packages/tensorflow/_api/v1/compat/v2/strings/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | 2
|
2020-08-03T13:02:06.000Z
|
2020-11-04T03:15:44.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Operations for working with string Tensors.
"""
from __future__ import print_function as _print_function
from tensorflow.python import as_string
from tensorflow.python import reduce_join_v2 as reduce_join
from tensorflow.python import regex_full_match
from tensorflow.python import regex_replace
from tensorflow.python import string_format as format
from tensorflow.python import string_join as join
from tensorflow.python import string_length_v2 as length
from tensorflow.python import string_lower as lower
from tensorflow.python import string_strip as strip
from tensorflow.python import string_to_hash_bucket as to_hash_bucket
from tensorflow.python import string_to_hash_bucket_fast as to_hash_bucket_fast
from tensorflow.python import string_to_hash_bucket_strong as to_hash_bucket_strong
from tensorflow.python import string_to_number as to_number
from tensorflow.python import string_upper as upper
from tensorflow.python import substr_v2 as substr
from tensorflow.python import unicode_script
from tensorflow.python import unicode_transcode
from tensorflow.python.ops.ragged.ragged_string_ops import string_bytes_split as bytes_split
from tensorflow.python.ops.ragged.ragged_string_ops import string_split_v2 as split
from tensorflow.python.ops.ragged.ragged_string_ops import unicode_decode
from tensorflow.python.ops.ragged.ragged_string_ops import unicode_decode_with_offsets
from tensorflow.python.ops.ragged.ragged_string_ops import unicode_encode
from tensorflow.python.ops.ragged.ragged_string_ops import unicode_split
from tensorflow.python.ops.ragged.ragged_string_ops import unicode_split_with_offsets
del _print_function
| 52.264706
| 92
| 0.876196
| 273
| 1,777
| 5.417582
| 0.212454
| 0.270453
| 0.324544
| 0.298851
| 0.62407
| 0.402299
| 0.37931
| 0.37931
| 0.290061
| 0.290061
| 0
| 0.002468
| 0.087788
| 1,777
| 33
| 93
| 53.848485
| 0.909932
| 0.095667
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.961538
| 0
| 0.961538
| 0.076923
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
31ca055c3eb4fa0acd56dac5b0f5ffb0fd15980a
| 1,657
|
py
|
Python
|
ivy/container/utility.py
|
VedPatwardhan/ivy
|
7b2105fa8cf38879444a1029bfaa7f0b2f27717a
|
[
"Apache-2.0"
] | 1
|
2022-02-13T19:35:02.000Z
|
2022-02-13T19:35:02.000Z
|
ivy/container/utility.py
|
Arijit1000/ivy
|
de193946a580ca0f54d78fe7fc4031a6ff66d2bb
|
[
"Apache-2.0"
] | null | null | null |
ivy/container/utility.py
|
Arijit1000/ivy
|
de193946a580ca0f54d78fe7fc4031a6ff66d2bb
|
[
"Apache-2.0"
] | null | null | null |
# global
from typing import Optional, Union, List, Dict, Tuple
# local
import ivy
from ivy.container.base import ContainerBase
# ToDo: implement all methods here as public instance methods
# noinspection PyMissingConstructor
class ContainerWithUtility(ContainerBase):
    """Container mixin providing the boolean reductions ``all`` and ``any``.

    Both methods map an ivy reduction over every array leaf of the
    container (non-array leaves pass through unchanged) and then apply
    ivy's in-place ``out`` handling.  The two original methods were
    byte-for-byte identical except for the reduction function, so the
    shared body lives in ``_bool_reduce``.
    """

    def _bool_reduce(self, fn, axis, keepdims, key_chains, to_apply,
                     prune_unapplied, map_sequences, out):
        # Shared implementation for all()/any(); `fn` is ivy.all or ivy.any.
        return self.handle_inplace(
            self.map(
                lambda x_, _: fn(x_, axis, keepdims) if ivy.is_array(x_) else x_,
                key_chains,
                to_apply,
                prune_unapplied,
                map_sequences,
            ),
            out,
        )

    def all(
        self: ivy.Container,
        axis: Optional[Union[int, Tuple[int], List[int]]] = None,
        keepdims: bool = False,
        key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
        to_apply: bool = True,
        prune_unapplied: bool = False,
        map_sequences: bool = False,
        out: Optional[ivy.Container] = None,
    ) -> ivy.Container:
        """Test whether all elements evaluate to True along ``axis``,
        leaf-wise across the container."""
        return self._bool_reduce(ivy.all, axis, keepdims, key_chains,
                                 to_apply, prune_unapplied, map_sequences, out)

    def any(
        self: ivy.Container,
        axis: Optional[Union[int, Tuple[int], List[int]]] = None,
        keepdims: bool = False,
        key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
        to_apply: bool = True,
        prune_unapplied: bool = False,
        map_sequences: bool = False,
        out: Optional[ivy.Container] = None,
    ) -> ivy.Container:
        """Test whether any element evaluates to True along ``axis``,
        leaf-wise across the container."""
        return self._bool_reduce(ivy.any, axis, keepdims, key_chains,
                                 to_apply, prune_unapplied, map_sequences, out)
| 30.685185
| 86
| 0.559445
| 182
| 1,657
| 4.928571
| 0.302198
| 0.093645
| 0.056856
| 0.044593
| 0.733556
| 0.733556
| 0.733556
| 0.733556
| 0.733556
| 0.733556
| 0
| 0
| 0.340374
| 1,657
| 53
| 87
| 31.264151
| 0.820677
| 0.063971
| 0
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0
| 1
| 0.045455
| false
| 0
| 0.068182
| 0.045455
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ee2ab3a1e58f62efd6b7e96c958280babd48c94
| 9,178
|
py
|
Python
|
tests/blackbox/limit/test_bb_limit.py
|
EdgeCast/waflz
|
d82c1ff846b6e5a9fc784178156da090996a956f
|
[
"Apache-2.0"
] | 29
|
2021-07-20T08:49:03.000Z
|
2022-03-30T00:00:50.000Z
|
tests/blackbox/limit/test_bb_limit.py
|
EdgeCast/waflz
|
d82c1ff846b6e5a9fc784178156da090996a956f
|
[
"Apache-2.0"
] | 3
|
2021-12-13T09:57:22.000Z
|
2022-03-29T23:56:05.000Z
|
tests/blackbox/limit/test_bb_limit.py
|
EdgeCast/waflz
|
d82c1ff846b6e5a9fc784178156da090996a956f
|
[
"Apache-2.0"
] | 3
|
2021-07-17T14:42:52.000Z
|
2021-07-28T21:27:43.000Z
|
#!/usr/bin/env python3
'''Test limit '''
# ------------------------------------------------------------------------------
# imports
# ------------------------------------------------------------------------------
import pytest
import subprocess
import os
import sys
import json
import time
import requests
import base64
import time
# ------------------------------------------------------------------------------
# constants
# ------------------------------------------------------------------------------
G_TEST_HOST = 'http://127.0.0.1:12345'
# ------------------------------------------------------------------------------
# run_command
# ------------------------------------------------------------------------------
def run_command(command):
    """Execute *command* through the shell.

    Returns a ``(returncode, stdout, stderr)`` tuple, with the output
    streams as raw bytes.
    """
    proc = subprocess.Popen(command,
                            shell=True,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out_bytes, err_bytes = proc.communicate()
    return (proc.returncode, out_bytes, err_bytes)
# ------------------------------------------------------------------------------
# setup waflz server with scopes
# ------------------------------------------------------------------------------
@pytest.fixture()
def setup_waflz_server():
    """Spawn waflz_server with the scopes config, ruleset and GeoIP DBs.

    Yields for the duration of the test, then kills the server process.
    """
    # ------------------------------------------------------
    # setup
    # ------------------------------------------------------
    l_file_path = os.path.dirname(os.path.abspath(__file__))
    l_scopes_dir = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/scopes'))
    l_conf_dir = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf'))
    l_ruleset_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/ruleset'))
    l_geoip2city_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/db/GeoLite2-City.mmdb'))
    l_geoip2ISP_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/db/GeoLite2-ASN.mmdb'))
    l_waflz_server_path = os.path.abspath(os.path.join(l_file_path, '../../../build/util/waflz_server/waflz_server'))
    # Build the argv once so the spawned process and the logged command
    # line can never drift apart (previously the list was duplicated).
    l_cmd = [l_waflz_server_path,
             '-d', l_conf_dir,
             '-b', l_scopes_dir,
             '-r', l_ruleset_path,
             '-g', l_geoip2city_path,
             '-s', l_geoip2ISP_path,
             '-L',
             '-j']
    l_subproc = subprocess.Popen(l_cmd)
    print('cmd: \n{}\n'.format(' '.join(l_cmd)))
    # Give the server a moment to come up before the test fires requests.
    time.sleep(1)
    # ------------------------------------------------------
    # yield...
    # ------------------------------------------------------
    yield setup_waflz_server
    # ------------------------------------------------------
    # tear down
    # ------------------------------------------------------
    l_code, l_out, l_err = run_command('kill -9 %d'%(l_subproc.pid))
    time.sleep(0.5)
# ------------------------------------------------------------------------------
# setup waflz server with only limit and geoip db's
# ------------------------------------------------------------------------------
@pytest.fixture()
def setup_waflz_server_limit():
    """Spawn waflz_server with only a limit profile and the GeoIP DBs.

    Yields for the duration of the test, then kills the server process.
    """
    # ------------------------------------------------------
    # setup
    # ------------------------------------------------------
    l_file_path = os.path.dirname(os.path.abspath(__file__))
    l_limit_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/limit/0053-kobjYva2.limit.json'))
    l_geoip2city_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/db/GeoLite2-City.mmdb'))
    l_geoip2ISP_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/db/GeoLite2-ASN.mmdb'))
    l_waflz_server_path = os.path.abspath(os.path.join(l_file_path, '../../../build/util/waflz_server/waflz_server'))
    # Build the argv once so the spawned process and the logged command
    # line can never drift apart (previously the list was duplicated).
    l_cmd = [l_waflz_server_path,
             '-l', l_limit_path,
             '-g', l_geoip2city_path,
             '-s', l_geoip2ISP_path,
             '-j']
    l_subproc = subprocess.Popen(l_cmd)
    print('cmd: \n{}\n'.format(' '.join(l_cmd)))
    # Give the server a moment to come up before the test fires requests.
    time.sleep(1)
    # ------------------------------------------------------
    # yield...
    # ------------------------------------------------------
    yield setup_waflz_server_limit
    # ------------------------------------------------------
    # tear down
    # ------------------------------------------------------
    l_code, l_out, l_err = run_command('kill -9 %d'%(l_subproc.pid))
    time.sleep(0.5)
# ------------------------------------------------------------------------------
# Test geo condition group
# ------------------------------------------------------------------------------
def test_geo_condition_group(setup_waflz_server):
    """Rate limit keyed on a geo condition group: a Brazilian IP is limited
    after two requests; a US IP on the same scope is never limited."""
    l_url = G_TEST_HOST + '/test.html'
    l_hdrs = {'host': 'limitzgeo.com',
              'waf-scopes-id': '0053',
              'x-waflz-ip': '200.196.153.102'}
    # Two requests within the window pass, the third is rate limited.
    for _ in range(2):
        assert requests.get(l_url, headers=l_hdrs).status_code == 200
    l_resp = requests.get(l_url, headers=l_hdrs)
    assert l_resp.status_code == 403
    assert l_resp.text == 'geo ddos enforcement\n'
    # A US IP on the same scope during the enforcement window gets through.
    l_hdrs['x-waflz-ip'] = '34.200.39.53'
    assert requests.get(l_url, headers=l_hdrs).status_code == 200
    # The US IP stays unblocked even above the threshold.
    l_hdrs['x-waflz-ip'] = '34.200.39.53'
    for _ in range(5):
        assert requests.get(l_url, headers=l_hdrs).status_code == 200
    # Sleep through the enforcement period so later tests start clean.
    time.sleep(2)
# ------------------------------------------------------------------------------
# Test asn condition group
# ------------------------------------------------------------------------------
def test_asn_condition_group(setup_waflz_server):
    """Rate limit keyed on an ASN condition group: a Japanese IP is limited
    after two requests; a US IP on the same scope is never limited."""
    l_url = G_TEST_HOST + '/test.html'
    l_hdrs = {'host': 'limitzasn.com',
              'waf-scopes-id': '0053',
              'x-waflz-ip': '202.32.115.5'}
    # Two requests within the window pass, the third is rate limited.
    for _ in range(2):
        assert requests.get(l_url, headers=l_hdrs).status_code == 200
    l_resp = requests.get(l_url, headers=l_hdrs)
    assert l_resp.status_code == 403
    assert l_resp.text == 'asn ddos enforcement\n'
    # A US IP on the same scope during the enforcement window gets through.
    l_hdrs['x-waflz-ip'] = '34.200.39.53'
    assert requests.get(l_url, headers=l_hdrs).status_code == 200
    # The US IP stays unblocked even above the threshold.
    l_hdrs['x-waflz-ip'] = '34.200.39.53'
    for _ in range(5):
        assert requests.get(l_url, headers=l_hdrs).status_code == 200
    # Sleep through the enforcement period so later tests start clean.
    time.sleep(2)
# ------------------------------------------------------------------------------
# Test both geo and asn in single condition group
# ------------------------------------------------------------------------------
def test_asn_and_geo_cg(setup_waflz_server_limit):
    """Rate limit with geo AND asn in a single condition group: only an IP
    matching both conditions is limited."""
    l_url = G_TEST_HOST + '/test.html'
    l_hdrs = {'x-waflz-ip': '192.229.234.2'}
    # Matching IP: two requests pass, the third is rate limited.
    for _ in range(2):
        assert requests.get(l_url, headers=l_hdrs).status_code == 200
    assert requests.get(l_url, headers=l_hdrs).status_code == 403
    # After the enforcement period a single request goes through again.
    time.sleep(2)
    assert requests.get(l_url, headers=l_hdrs).status_code == 200
    # Same country, different ASN: all requests go through.
    l_hdrs['x-waflz-ip'] = '162.115.42.1'
    for _ in range(4):
        assert requests.get(l_url, headers=l_hdrs).status_code == 200
| 44.338164
| 117
| 0.441708
| 988
| 9,178
| 3.874494
| 0.168016
| 0.013584
| 0.029258
| 0.040752
| 0.806426
| 0.791797
| 0.76907
| 0.753135
| 0.739551
| 0.720742
| 0
| 0.026074
| 0.218566
| 9,178
| 206
| 118
| 44.553398
| 0.507669
| 0.3674
| 0
| 0.731092
| 0
| 0
| 0.136348
| 0.057612
| 0
| 0
| 0
| 0
| 0.117647
| 1
| 0.05042
| false
| 0
| 0.07563
| 0
| 0.134454
| 0.016807
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
730b90ad75b19940bb1e87e6c98e690303e67603
| 210
|
py
|
Python
|
test/util.py
|
charles-x-chen/pyaem
|
2eecd53d6df3046b1dac2a939cf84e68f1c16a2c
|
[
"MIT"
] | 17
|
2015-02-16T23:51:30.000Z
|
2022-03-22T22:31:26.000Z
|
test/util.py
|
charles-x-chen/pyaem
|
2eecd53d6df3046b1dac2a939cf84e68f1c16a2c
|
[
"MIT"
] | null | null | null |
test/util.py
|
charles-x-chen/pyaem
|
2eecd53d6df3046b1dac2a939cf84e68f1c16a2c
|
[
"MIT"
] | 17
|
2015-01-12T23:26:13.000Z
|
2020-10-05T07:59:00.000Z
|
class HandlersMatcher(object):
    """Equality matcher comparing a handlers mapping by its key set.

    An instance compares equal to any mapping whose keys match the
    expected ``handler_keys``, regardless of order.
    """

    def __init__(self, handler_keys):
        # Expected handler keys (an iterable of hashable, orderable keys).
        self.handler_keys = handler_keys

    def __eq__(self, handlers):
        # BUG FIX: the original compared `handlers.keys().sort()` with
        # `self.handler_keys.sort()`. On Python 3 dict views have no
        # .sort() (AttributeError), and list.sort() returns None anyway,
        # so the comparison was always None == None -> True.
        return sorted(handlers.keys()) == sorted(self.handler_keys)
| 26.25
| 65
| 0.685714
| 25
| 210
| 5.28
| 0.48
| 0.333333
| 0.340909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195238
| 210
| 7
| 66
| 30
| 0.781065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
731c4b803251213116087569b9d53cf04d45d629
| 77
|
py
|
Python
|
test/test_dreem_tools.py
|
jyesselm/dreem-tools
|
7d17b27ef3e196f12cdb3537937ec34b71a2330a
|
[
"MIT"
] | null | null | null |
test/test_dreem_tools.py
|
jyesselm/dreem-tools
|
7d17b27ef3e196f12cdb3537937ec34b71a2330a
|
[
"MIT"
] | null | null | null |
test/test_dreem_tools.py
|
jyesselm/dreem-tools
|
7d17b27ef3e196f12cdb3537937ec34b71a2330a
|
[
"MIT"
] | null | null | null |
"""
Tests for `dreem_tools` module.
"""
import pytest
def test():
    # Placeholder smoke test: passes as long as the test module (and its
    # imports) load cleanly. No dreem_tools behavior is exercised yet.
    pass
| 8.555556
| 31
| 0.623377
| 10
| 77
| 4.7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.220779
| 77
| 8
| 32
| 9.625
| 0.783333
| 0.402597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
732dd257141a4a19ca68f41aa8a021b21a3d90b2
| 7,051
|
py
|
Python
|
AutoDL_ingestion_program/architectures/resnet.py
|
kvr777/autodl_cv2
|
52f186d265061c1465fdc07249f22fa0e03c3478
|
[
"Apache-2.0"
] | 2
|
2019-09-20T14:10:37.000Z
|
2019-11-17T12:58:24.000Z
|
AutoDL_ingestion_program/architectures/resnet.py
|
kvr777/autodl_cv2
|
52f186d265061c1465fdc07249f22fa0e03c3478
|
[
"Apache-2.0"
] | null | null | null |
AutoDL_ingestion_program/architectures/resnet.py
|
kvr777/autodl_cv2
|
52f186d265061c1465fdc07249f22fa0e03c3478
|
[
"Apache-2.0"
] | 2
|
2019-09-23T05:22:10.000Z
|
2020-01-03T12:36:56.000Z
|
import logging
import sys
import torch
import torchvision.models as models
from torch.utils import model_zoo
from torchvision.models.resnet import BasicBlock, model_urls, Bottleneck
import skeleton
# Module-level logging setup: an INFO-level logger named after this module,
# emitting "[time level file] message" records to stdout.
formatter = logging.Formatter(fmt='[%(asctime)s %(levelname)s %(filename)s] %(message)s')
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)
LOGGER.addHandler(handler)
class ResNet18(models.ResNet):
    """ResNet-18 classifier with a preprocessing stem.

    The stem normalizes inputs (mean 0.5, std 0.25) and adapts
    ``in_channels`` to the 3-channel input torchvision's ResNet expects:
    identity for 3 channels, channel copying for 1 channel, and a 3x3
    convolution for any other channel count.
    """

    # Residual block type; also fixes the classifier width
    # (512 * Block.expansion).
    Block = BasicBlock

    def __init__(self, in_channels, num_classes=10, **kwargs):
        # Fix: use the class attribute directly instead of re-binding a
        # shadowing local `Block = BasicBlock` (it merely duplicated the
        # class attribute above).
        super(ResNet18, self).__init__(ResNet18.Block, [2, 2, 2, 2], num_classes=num_classes, **kwargs)  # resnet18
        if in_channels == 3:
            self.stem = torch.nn.Sequential(
                skeleton.nn.Normalize(0.5, 0.25, inplace=False),
            )
        elif in_channels == 1:
            self.stem = torch.nn.Sequential(
                skeleton.nn.Normalize(0.5, 0.25, inplace=False),
                skeleton.nn.CopyChannels(3),
            )
        else:
            self.stem = torch.nn.Sequential(
                skeleton.nn.Normalize(0.5, 0.25, inplace=False),
                torch.nn.Conv2d(in_channels, 3, kernel_size=3, stride=1, padding=1, bias=False)
            )
        # Bias-free classifier; re-created so init() can drop the pretrained
        # fc weights.
        self.fc = torch.nn.Linear(512 * ResNet18.Block.expansion, num_classes, bias=False)
        self._half = False
        self._class_normalize = True

    def init(self, model_dir, gain=1.):
        """Load ImageNet-pretrained resnet18 weights (except the classifier)
        and Xavier-initialize the stem and fc layers.

        model_dir: cache directory for the downloaded checkpoint.
        gain: Xavier initialization gain.
        """
        sd = model_zoo.load_url(model_urls['resnet18'], model_dir=model_dir)
        del sd['fc.weight']
        del sd['fc.bias']
        self.load_state_dict(sd, strict=False)
        for idx in range(len(self.stem)):
            m = self.stem[idx]
            if hasattr(m, 'weight'):
                torch.nn.init.xavier_normal_(m.weight, gain=gain)
        LOGGER.debug('initialize stem weight')
        torch.nn.init.xavier_uniform_(self.fc.weight, gain=gain)
        LOGGER.debug('initialize classifier weight')

    def forward(self, inputs, targets=None, tau=8.0, reduction='avg'):
        """Compute logits; when targets are given, also return the loss.

        tau: temperature dividing the logits before the loss.
        reduction: 'avg' | 'max' | 'min'; any other value leaves the loss
            unreduced (matching original behavior).
        """
        inputs = self.stem(inputs)
        logits = models.ResNet.forward(self, inputs)
        logits /= tau
        if targets is None:
            return logits
        if targets.device != logits.device:
            targets = targets.to(device=logits.device)
        # NOTE(review): self.loss_fn is assigned outside this class — confirm
        # callers set it before forward() is used with targets.
        loss = self.loss_fn(input=logits, target=targets)
        if self._class_normalize and isinstance(self.loss_fn, (torch.nn.BCEWithLogitsLoss,
                                                               skeleton.nn.BinaryCrossEntropyLabelSmooth)):
            # Re-weight the per-element BCE loss so positive and negative
            # labels contribute evenly (ratios clamped to [0.1, 0.9]).
            pos = (targets == 1).to(logits.dtype)
            neg = (targets < 1).to(logits.dtype)
            npos = pos.sum()
            nneg = neg.sum()
            positive_ratio = max(0.1, min(0.9, (npos) / (npos + nneg)))
            negative_ratio = max(0.1, min(0.9, (nneg) / (npos + nneg)))
            LOGGER.debug('[BCEWithLogitsLoss] positive_ratio:%f, negative_ratio:%f',
                         positive_ratio, negative_ratio)
            normalized_loss = (loss * pos) / positive_ratio
            normalized_loss += (loss * neg) / negative_ratio
            loss = normalized_loss
        if reduction == 'avg':
            loss = loss.mean()
        elif reduction == 'max':
            loss = loss.max()
        elif reduction == 'min':
            loss = loss.min()
        return logits, loss

    def half(self):
        """Cast leaf modules to fp16, keeping BatchNorm2d in fp32 for
        numerical stability. Returns self for chaining."""
        for module in self.modules():
            if len([c for c in module.children()]) > 0:
                continue
            if not isinstance(module, torch.nn.BatchNorm2d):
                module.half()
            else:
                module.float()
        self._half = True
        return self
class ResNet34(models.ResNet):
    """ResNet-34 with a channel-adapting, normalizing stem and a
    temperature-scaled, optionally class-balanced loss (same contract as
    ResNet18, but with the [3, 4, 6, 3] layer configuration)."""

    Block = BasicBlock

    def __init__(self, in_channels, num_classes=10, **kwargs):
        Block = BasicBlock
        super(ResNet34, self).__init__(Block, [3, 4, 6, 3], num_classes=num_classes, **kwargs)
        # Stem: normalize, then adapt the channel count to the 3 channels
        # the torchvision backbone expects.
        stem_layers = [skeleton.nn.Normalize(0.5, 0.25, inplace=False)]
        if in_channels == 1:
            stem_layers.append(skeleton.nn.CopyChannels(3))
        elif in_channels != 3:
            stem_layers.append(
                torch.nn.Conv2d(in_channels, 3, kernel_size=3, stride=1, padding=1, bias=False))
        self.stem = torch.nn.Sequential(*stem_layers)
        self.fc = torch.nn.Linear(512 * Block.expansion, num_classes, bias=False)
        self._half = False
        self._class_normalize = True

    def init(self, model_dir, gain=1.):
        """Load pretrained resnet34 weights (minus fc) and re-initialize the
        stem and classifier with Xavier init."""
        state = model_zoo.load_url(model_urls['resnet34'], model_dir=model_dir)
        del state['fc.weight']
        del state['fc.bias']
        self.load_state_dict(state, strict=False)
        for layer in [self.stem[i] for i in range(len(self.stem))]:
            if hasattr(layer, 'weight'):
                torch.nn.init.xavier_normal_(layer.weight, gain=gain)
        LOGGER.debug('initialize stem weight')
        torch.nn.init.xavier_uniform_(self.fc.weight, gain=gain)
        LOGGER.debug('initialize classifier weight')

    def forward(self, inputs, targets=None, tau=8.0, reduction='avg'):
        """Return logits; with targets, return (logits, reduced loss)."""
        logits = models.ResNet.forward(self, self.stem(inputs))
        logits /= tau  # temperature scaling
        if targets is None:
            return logits
        if targets.device != logits.device:
            targets = targets.to(device=logits.device)
        loss = self.loss_fn(input=logits, target=targets)
        balance = self._class_normalize and isinstance(
            self.loss_fn,
            (torch.nn.BCEWithLogitsLoss, skeleton.nn.BinaryCrossEntropyLabelSmooth))
        if balance:
            # Balance positive/negative contributions of the element-wise loss.
            pos_mask = (targets == 1).to(logits.dtype)
            neg_mask = (targets < 1).to(logits.dtype)
            n_pos = pos_mask.sum()
            n_neg = neg_mask.sum()
            pos_ratio = max(0.1, min(0.9, (n_pos) / (n_pos + n_neg)))
            neg_ratio = max(0.1, min(0.9, (n_neg) / (n_pos + n_neg)))
            LOGGER.debug('[BCEWithLogitsLoss] positive_ratio:%f, negative_ratio:%f',
                         pos_ratio, neg_ratio)
            loss = (loss * pos_mask) / pos_ratio + (loss * neg_mask) / neg_ratio
        reducers = {'avg': 'mean', 'max': 'max', 'min': 'min'}
        if reduction in reducers:
            loss = getattr(loss, reducers[reduction])()
        return logits, loss

    def half(self):
        """Cast leaf modules to fp16, keeping BatchNorm2d in fp32."""
        for module in self.modules():
            has_children = len([c for c in module.children()]) > 0
            if has_children:
                continue
            if isinstance(module, torch.nn.BatchNorm2d):
                module.float()
            else:
                module.half()
        self._half = True
        return self
| 35.432161
| 108
| 0.569139
| 830
| 7,051
| 4.712048
| 0.172289
| 0.032217
| 0.019944
| 0.023012
| 0.880082
| 0.866786
| 0.866786
| 0.866786
| 0.866786
| 0.866786
| 0
| 0.022296
| 0.313005
| 7,051
| 198
| 109
| 35.611111
| 0.785095
| 0.001135
| 0
| 0.842767
| 0
| 0
| 0.049425
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050314
| false
| 0
| 0.044025
| 0
| 0.157233
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7340ef22f319f61f4818b5d98377b6315d142ac2
| 348,258
|
py
|
Python
|
tccli/services/ecm/ecm_client.py
|
HS-Gray/tencentcloud-cli
|
3822fcfdfed570fb526fe49abe6793e2f9127f4a
|
[
"Apache-2.0"
] | 47
|
2018-05-31T11:26:25.000Z
|
2022-03-08T02:12:45.000Z
|
tccli/services/ecm/ecm_client.py
|
HS-Gray/tencentcloud-cli
|
3822fcfdfed570fb526fe49abe6793e2f9127f4a
|
[
"Apache-2.0"
] | 23
|
2018-06-14T10:46:30.000Z
|
2022-02-28T02:53:09.000Z
|
tccli/services/ecm/ecm_client.py
|
HS-Gray/tencentcloud-cli
|
3822fcfdfed570fb526fe49abe6793e2f9127f4a
|
[
"Apache-2.0"
] | 22
|
2018-10-22T09:49:45.000Z
|
2022-03-30T08:06:04.000Z
|
# -*- coding: utf-8 -*-
import os
import sys
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError, ClientError, ParamError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.ecm.v20190719 import ecm_client as ecm_client_v20190719
from tencentcloud.ecm.v20190719 import models as models_v20190719
from jmespath import search
import time
from tccli import six
def doReplaceSecurityGroupPolicy(args, parsed_globals):
    """Invoke the ECM ``ReplaceSecurityGroupPolicy`` API; print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ReplaceSecurityGroupPolicyRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Single call, or poll until the waiter's JMESPath expression matches its
    # expected value (raising ClientError on waiter timeout).
    while True:
        rsp = client.ReplaceSecurityGroupPolicy(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeConfig(args, parsed_globals):
    """Invoke the ECM ``DescribeConfig`` API; print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeConfigRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Single call, or poll until the waiter's JMESPath expression matches its
    # expected value (raising ClientError on waiter timeout).
    while True:
        rsp = client.DescribeConfig(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTaskResult(args, parsed_globals):
    """Invoke the ECM ``DescribeTaskResult`` API; print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTaskResultRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Single call, or poll until the waiter's JMESPath expression matches its
    # expected value (raising ClientError on waiter timeout).
    while True:
        rsp = client.DescribeTaskResult(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyModuleName(args, parsed_globals):
    """Invoke the ECM ``ModifyModuleName`` API; print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyModuleNameRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Single call, or poll until the waiter's JMESPath expression matches its
    # expected value (raising ClientError on waiter timeout).
    while True:
        rsp = client.ModifyModuleName(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRouteTables(args, parsed_globals):
    """Invoke the ECM ``DescribeRouteTables`` API; print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRouteTablesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Single call, or poll until the waiter's JMESPath expression matches its
    # expected value (raising ClientError on waiter timeout).
    while True:
        rsp = client.DescribeRouteTables(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateRouteTable(args, parsed_globals):
    """Invoke the ECM ``CreateRouteTable`` API; print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateRouteTableRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Single call, or poll until the waiter's JMESPath expression matches its
    # expected value (raising ClientError on waiter timeout).
    while True:
        rsp = client.CreateRouteTable(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReplaceRouteTableAssociation(args, parsed_globals):
    """Invoke the ECM ``ReplaceRouteTableAssociation`` API; print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ReplaceRouteTableAssociationRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Single call, or poll until the waiter's JMESPath expression matches its
    # expected value (raising ClientError on waiter timeout).
    while True:
        rsp = client.ReplaceRouteTableAssociation(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyInstancesAttribute(args, parsed_globals):
    """Invoke the ECM ``ModifyInstancesAttribute`` API; print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyInstancesAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Single call, or poll until the waiter's JMESPath expression matches its
    # expected value (raising ClientError on waiter timeout).
    while True:
        rsp = client.ModifyInstancesAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doResetRoutes(args, parsed_globals):
    """Invoke the ECM ``ResetRoutes`` API; print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ResetRoutesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Single call, or poll until the waiter's JMESPath expression matches its
    # expected value (raising ClientError on waiter timeout).
    while True:
        rsp = client.ResetRoutes(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityGroupPolicies(args, parsed_globals):
    """Invoke the ECM ``DescribeSecurityGroupPolicies`` API; print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSecurityGroupPoliciesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Single call, or poll until the waiter's JMESPath expression matches its
    # expected value (raising ClientError on waiter timeout).
    while True:
        rsp = client.DescribeSecurityGroupPolicies(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRunInstances(args, parsed_globals):
    """Invoke the ECM ``RunInstances`` API and print the formatted response.

    args: request parameters (dict), serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region,
        endpoint, output and waiter settings).
    """
    opts = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = opts[OptionsDefine.RoleArn.replace('-', '_')]
    session_name = opts[OptionsDefine.RoleSessionName.replace('-', '_')]
    if opts[OptionsDefine.UseCVMRole.replace('-', '_')]:
        credentials = credential.CVMRoleCredential()
    elif role_arn and session_name:
        credentials = credential.STSAssumeRoleCredential(
            opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey],
            role_arn, session_name
        )
    else:
        credentials = credential.Credential(
            opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey],
            opts[OptionsDefine.Token]
        )
    timeout = opts[OptionsDefine.Timeout]
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[opts[OptionsDefine.Version]]
    ecm = sdk_module.EcmClient(credentials, opts[OptionsDefine.Region], client_conf)
    ecm._sdkVersion += ("_CLI_" + __version__)
    model_module = MODELS_MAP[opts[OptionsDefine.Version]]
    request = model_module.RunInstancesRequest()
    request.from_json_string(json.dumps(args))
    started = time.time()
    # Call once; with a waiter configured, poll until the JMESPath expression
    # matches the expected value or the waiter timeout elapses.
    while True:
        response = ecm.RunInstances(request)
        raw = response.to_json_string()
        try:
            payload = json.loads(raw)
        except TypeError:
            payload = json.loads(raw.decode('utf-8'))  # python3.3
        if not opts[OptionsDefine.Waiter] or search(opts['OptionsDefine.WaiterInfo']['expr'], payload) == opts['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - started >= opts['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (opts['OptionsDefine.WaiterInfo']['expr'], opts['OptionsDefine.WaiterInfo']['to'],
                               search(opts['OptionsDefine.WaiterInfo']['expr'], payload)))
        print('Inquiry result is %s.' % search(opts['OptionsDefine.WaiterInfo']['expr'], payload))
        time.sleep(opts['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", payload, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doDescribeCustomImageTask(args, parsed_globals):
    """Execute the ECM ``DescribeCustomImageTask`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeCustomImageTaskRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeCustomImageTask(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAddresses(args, parsed_globals):
    """Execute the ECM ``DescribeAddresses`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeAddressesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeAddresses(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDefaultSubnet(args, parsed_globals):
    """Execute the ECM ``DescribeDefaultSubnet`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDefaultSubnetRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeDefaultSubnet(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeListeners(args, parsed_globals):
    """Execute the ECM ``DescribeListeners`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeListenersRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeListeners(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateImage(args, parsed_globals):
    """Execute the ECM ``CreateImage`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateImageRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateImage(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeNode(args, parsed_globals):
    """Execute the ECM ``DescribeNode`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeNodeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeNode(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doImportImage(args, parsed_globals):
    """Execute the ECM ``ImportImage`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ImportImageRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ImportImage(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchDeregisterTargets(args, parsed_globals):
    """Execute the ECM ``BatchDeregisterTargets`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BatchDeregisterTargetsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.BatchDeregisterTargets(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyLoadBalancerAttributes(args, parsed_globals):
    """Execute the ECM ``ModifyLoadBalancerAttributes`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyLoadBalancerAttributesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyLoadBalancerAttributes(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeLoadBalanceTaskStatus(args, parsed_globals):
    """Execute the ECM ``DescribeLoadBalanceTaskStatus`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeLoadBalanceTaskStatusRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeLoadBalanceTaskStatus(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeVpcs(args, parsed_globals):
    """Execute the ECM ``DescribeVpcs`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeVpcsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeVpcs(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchModifyTargetWeight(args, parsed_globals):
    """Execute the ECM ``BatchModifyTargetWeight`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BatchModifyTargetWeightRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.BatchModifyTargetWeight(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeImage(args, parsed_globals):
    """Execute the ECM ``DescribeImage`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeImageRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeImage(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisassociateSecurityGroups(args, parsed_globals):
    """Execute the ECM ``DisassociateSecurityGroups`` action from the CLI.

    Builds a credential from the global options (CVM role, STS assume-role,
    or static secret id/key), configures the HTTP and client profiles,
    issues the request, optionally polls until the configured waiter
    condition is met, and prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Option constants contain '-', but g_param keys are normalized to '_';
    # translate each key once instead of at every lookup.
    use_cvm_role = OptionsDefine.UseCVMRole.replace('-', '_')
    role_arn = OptionsDefine.RoleArn.replace('-', '_')
    role_session = OptionsDefine.RoleSessionName.replace('-', '_')
    https_proxy = OptionsDefine.HttpsProxy.replace('-', '_')

    if g_param[use_cvm_role]:
        cred = credential.CVMRoleCredential()
    elif g_param[role_arn] and g_param[role_session]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[role_arn], g_param[role_session]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default to a 60-second request timeout unless one was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[https_proxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DisassociateSecurityGroupsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DisassociateSecurityGroups(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # On older Python versions to_json_string() may yield bytes.
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE: waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo', matching every sibling command here.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSecurityGroup(args, parsed_globals):
    """Invoke the DeleteSecurityGroup API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].DeleteSecurityGroupRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteSecurityGroup(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAddressesBandwidth(args, parsed_globals):
    """Invoke the ModifyAddressesBandwidth API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].ModifyAddressesBandwidthRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyAddressesBandwidth(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSubnet(args, parsed_globals):
    """Invoke the CreateSubnet API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].CreateSubnetRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateSubnet(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doResetInstancesMaxBandwidth(args, parsed_globals):
    """Invoke the ResetInstancesMaxBandwidth API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].ResetInstancesMaxBandwidthRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ResetInstancesMaxBandwidth(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeModule(args, parsed_globals):
    """Invoke the DescribeModule API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].DescribeModuleRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeModule(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doStartInstances(args, parsed_globals):
    """Invoke the StartInstances API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].StartInstancesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.StartInstances(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteListener(args, parsed_globals):
    """Invoke the DeleteListener API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].DeleteListenerRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteListener(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisassociateAddress(args, parsed_globals):
    """Invoke the DisassociateAddress API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].DisassociateAddressRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DisassociateAddress(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateVpc(args, parsed_globals):
    """Invoke the CreateVpc API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].CreateVpcRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateVpc(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAttachDisks(args, parsed_globals):
    """Invoke the AttachDisks API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].AttachDisksRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.AttachDisks(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAllocateAddresses(args, parsed_globals):
    """Invoke the AllocateAddresses API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].AllocateAddressesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.AllocateAddresses(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyModuleDisableWanIp(args, parsed_globals):
    """Invoke the ModifyModuleDisableWanIp API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].ModifyModuleDisableWanIpRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyModuleDisableWanIp(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyListener(args, parsed_globals):
    """Invoke the ModifyListener API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].ModifyListenerRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyListener(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisassociateInstancesKeyPairs(args, parsed_globals):
    """Invoke the DisassociateInstancesKeyPairs API for the ECM service and print the result.

    Builds a credential and client from the parsed global CLI options, sends
    the request built from ``args``, and — when a waiter is configured —
    re-issues the request until the waiter's ``expr`` (evaluated via
    ``search``) equals its ``to`` value or its ``timeout`` elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > assumed STS role > static keys.
    role_arn = g_param[OptionsDefine.RoleArn.replace('-', '_')]
    role_session_name = g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif role_arn and role_session_name:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            role_arn,
            role_session_name
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,  # 60s default
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].DisassociateInstancesKeyPairsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DisassociateInstancesKeyPairs(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may yield bytes on some interpreters (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter info is stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (file-wide convention); confirm against
        # parse_global_arg before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError(
                'Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAssociateAddress(args, parsed_globals):
    """CLI handler for the ECM AssociateAddress call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds an AssociateAddressRequest from *args*,
    and renders the JSON response through FormatOutput. When --waiter is
    given, the call is repeated until the waiter expression matches its
    target value or the waiter timeout elapses (raises ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AssociateAddressRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.AssociateAddress(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDefaultSubnet(args, parsed_globals):
    """CLI handler for the ECM ModifyDefaultSubnet call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a ModifyDefaultSubnetRequest from
    *args*, and renders the JSON response through FormatOutput. When
    --waiter is given, the call is repeated until the waiter expression
    matches its target value or the waiter timeout elapses (raises
    ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyDefaultSubnetRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyDefaultSubnet(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSubnet(args, parsed_globals):
    """CLI handler for the ECM DeleteSubnet call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a DeleteSubnetRequest from *args*, and
    renders the JSON response through FormatOutput. When --waiter is
    given, the call is repeated until the waiter expression matches its
    target value or the waiter timeout elapses (raises ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteSubnetRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteSubnet(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySecurityGroupAttribute(args, parsed_globals):
    """CLI handler for the ECM ModifySecurityGroupAttribute call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a ModifySecurityGroupAttributeRequest
    from *args*, and renders the JSON response through FormatOutput.
    When --waiter is given, the call is repeated until the waiter
    expression matches its target value or the waiter timeout elapses
    (raises ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifySecurityGroupAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifySecurityGroupAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchRegisterTargets(args, parsed_globals):
    """CLI handler for the ECM BatchRegisterTargets call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a BatchRegisterTargetsRequest from
    *args*, and renders the JSON response through FormatOutput. When
    --waiter is given, the call is repeated until the waiter expression
    matches its target value or the waiter timeout elapses (raises
    ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BatchRegisterTargetsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.BatchRegisterTargets(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeInstancesDeniedActions(args, parsed_globals):
    """CLI handler for the ECM DescribeInstancesDeniedActions call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a DescribeInstancesDeniedActionsRequest
    from *args*, and renders the JSON response through FormatOutput.
    When --waiter is given, the call is repeated until the waiter
    expression matches its target value or the waiter timeout elapses
    (raises ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeInstancesDeniedActionsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeInstancesDeniedActions(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateNetworkInterface(args, parsed_globals):
    """CLI handler for the ECM CreateNetworkInterface call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a CreateNetworkInterfaceRequest from
    *args*, and renders the JSON response through FormatOutput. When
    --waiter is given, the call is repeated until the waiter expression
    matches its target value or the waiter timeout elapses (raises
    ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateNetworkInterfaceRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateNetworkInterface(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doStopInstances(args, parsed_globals):
    """CLI handler for the ECM StopInstances call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a StopInstancesRequest from *args*,
    and renders the JSON response through FormatOutput. When --waiter is
    given, the call is repeated until the waiter expression matches its
    target value or the waiter timeout elapses (raises ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.StopInstancesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.StopInstances(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeImportImageOs(args, parsed_globals):
    """CLI handler for the ECM DescribeImportImageOs call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a DescribeImportImageOsRequest from
    *args*, and renders the JSON response through FormatOutput. When
    --waiter is given, the call is repeated until the waiter expression
    matches its target value or the waiter timeout elapses (raises
    ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeImportImageOsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeImportImageOs(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMonthPeakNetwork(args, parsed_globals):
    """CLI handler for the ECM DescribeMonthPeakNetwork call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a DescribeMonthPeakNetworkRequest from
    *args*, and renders the JSON response through FormatOutput. When
    --waiter is given, the call is repeated until the waiter expression
    matches its target value or the waiter timeout elapses (raises
    ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeMonthPeakNetworkRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeMonthPeakNetwork(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDetachNetworkInterface(args, parsed_globals):
    """CLI handler for the ECM DetachNetworkInterface call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a DetachNetworkInterfaceRequest from
    *args*, and renders the JSON response through FormatOutput. When
    --waiter is given, the call is repeated until the waiter expression
    matches its target value or the waiter timeout elapses (raises
    ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DetachNetworkInterfaceRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DetachNetworkInterface(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doResetInstances(args, parsed_globals):
    """CLI handler for the ECM ResetInstances call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a ResetInstancesRequest from *args*,
    and renders the JSON response through FormatOutput. When --waiter is
    given, the call is repeated until the waiter expression matches its
    target value or the waiter timeout elapses (raises ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ResetInstancesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ResetInstances(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doTerminateDisks(args, parsed_globals):
    """CLI handler for the ECM TerminateDisks call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a TerminateDisksRequest from *args*,
    and renders the JSON response through FormatOutput. When --waiter is
    given, the call is repeated until the waiter expression matches its
    target value or the waiter timeout elapses (raises ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.TerminateDisksRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.TerminateDisks(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTargetPort(args, parsed_globals):
    """CLI handler for the ECM ModifyTargetPort call.

    Resolves credentials in priority order (CVM role, STS assume-role,
    static secret id/key), builds a ModifyTargetPortRequest from *args*,
    and renders the JSON response through FormatOutput. When --waiter is
    given, the call is repeated until the waiter expression matches its
    target value or the waiter timeout elapses (raises ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests as coming from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyTargetPortRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyTargetPort(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. Python 3.3) return bytes here.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # WaiterInfo is only populated when --waiter was supplied, so it
        # is read only after the guard above.
        waiter_info = g_param['OptionsDefine.WaiterInfo']
        if search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter_info['expr'], waiter_info['to'],
                               search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteNetworkInterface(args, parsed_globals):
    """Invoke the ECM DeleteNetworkInterface action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteNetworkInterfaceRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.DeleteNetworkInterface(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteModule(args, parsed_globals):
    """Invoke the ECM DeleteModule action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteModuleRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.DeleteModule(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReleaseIpv6Addresses(args, parsed_globals):
    """Invoke the ECM ReleaseIpv6Addresses action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ReleaseIpv6AddressesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.ReleaseIpv6Addresses(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTargetHealth(args, parsed_globals):
    """Invoke the ECM DescribeTargetHealth action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTargetHealthRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.DescribeTargetHealth(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReplaceRoutes(args, parsed_globals):
    """Invoke the ECM ReplaceRoutes action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ReplaceRoutesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.ReplaceRoutes(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRouteConflicts(args, parsed_globals):
    """Invoke the ECM DescribeRouteConflicts action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRouteConflictsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.DescribeRouteConflicts(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDetachDisks(args, parsed_globals):
    """Invoke the ECM DetachDisks action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DetachDisksRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.DetachDisks(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisableRoutes(args, parsed_globals):
    """Invoke the ECM DisableRoutes action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DisableRoutesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.DisableRoutes(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribePriceRunInstance(args, parsed_globals):
    """Invoke the ECM DescribePriceRunInstance action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribePriceRunInstanceRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.DescribePriceRunInstance(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeBaseOverview(args, parsed_globals):
    """Invoke the ECM DescribeBaseOverview action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeBaseOverviewRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.DescribeBaseOverview(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAttachNetworkInterface(args, parsed_globals):
    """Invoke the ECM AttachNetworkInterface action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AttachNetworkInterfaceRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.AttachNetworkInterface(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReleaseAddresses(args, parsed_globals):
    """Invoke the ECM ReleaseAddresses action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ReleaseAddressesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.ReleaseAddresses(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSecurityGroupPolicies(args, parsed_globals):
    """Invoke the ECM DeleteSecurityGroupPolicies action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteSecurityGroupPoliciesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.DeleteSecurityGroupPolicies(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateKeyPair(args, parsed_globals):
    """Invoke the ECM CreateKeyPair action and print the formatted result.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, waiter configuration, ...).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM instance role > STS assumed role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied on the command line.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateKeyPairRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Re-issue the request until there is no waiter, or the waiter's JMESPath
    # expression evaluates to the expected target value.
    while True:
        rsp = client.CreateKeyPair(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # to_json_string may return bytes on python3.3; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
             search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyModuleConfig(args, parsed_globals):
    """CLI action: call the ECM ModifyModuleConfig API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyModuleConfigRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyModuleConfig(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSetLoadBalancerSecurityGroups(args, parsed_globals):
    """CLI action: call the ECM SetLoadBalancerSecurityGroups API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.SetLoadBalancerSecurityGroupsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.SetLoadBalancerSecurityGroups(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeModuleDetail(args, parsed_globals):
    """CLI action: call the ECM DescribeModuleDetail API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeModuleDetailRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeModuleDetail(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteLoadBalancer(args, parsed_globals):
    """CLI action: call the ECM DeleteLoadBalancer API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteLoadBalancerRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteLoadBalancer(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeInstanceTypeConfig(args, parsed_globals):
    """CLI action: call the ECM DescribeInstanceTypeConfig API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeInstanceTypeConfigRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeInstanceTypeConfig(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteRoutes(args, parsed_globals):
    """CLI action: call the ECM DeleteRoutes API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteRoutesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteRoutes(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySecurityGroupPolicies(args, parsed_globals):
    """CLI action: call the ECM ModifySecurityGroupPolicies API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifySecurityGroupPoliciesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifySecurityGroupPolicies(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySubnetAttribute(args, parsed_globals):
    """CLI action: call the ECM ModifySubnetAttribute API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifySubnetAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifySubnetAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribePackingQuotaGroup(args, parsed_globals):
    """CLI action: call the ECM DescribePackingQuotaGroup API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribePackingQuotaGroupRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribePackingQuotaGroup(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeNetworkInterfaces(args, parsed_globals):
    """CLI action: call the ECM DescribeNetworkInterfaces API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeNetworkInterfacesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeNetworkInterfaces(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDisks(args, parsed_globals):
    """CLI action: call the ECM CreateDisks API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateDisksRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateDisks(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAssociateSecurityGroups(args, parsed_globals):
    """CLI action: call the ECM AssociateSecurityGroups API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AssociateSecurityGroupsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.AssociateSecurityGroups(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyImageAttribute(args, parsed_globals):
    """CLI action: call the ECM ModifyImageAttribute API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyImageAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyImageAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTaskStatus(args, parsed_globals):
    """CLI action: call the ECM DescribeTaskStatus API.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output/filter settings, optional waiter configuration).
    :raises ClientError: if a waiter is configured and its condition is not met
        before the waiter timeout elapses.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTaskStatusRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeTaskStatus(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3: to_json_string may return bytes
        if not g_param[OptionsDefine.Waiter]:
            break
        # NOTE(review): waiter settings are stored under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the OptionsDefine constant) — assumed
        # intentional, matching parse_global_arg; confirm before changing.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribePeakNetworkOverview(args, parsed_globals):
    """Run the ECM DescribePeakNetworkOverview action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].DescribePeakNetworkOverviewRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.DescribePeakNetworkOverview(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSecurityGroup(args, parsed_globals):
    """Run the ECM CreateSecurityGroup action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].CreateSecurityGroupRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.CreateSecurityGroup(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doImportCustomImage(args, parsed_globals):
    """Run the ECM ImportCustomImage action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].ImportCustomImageRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.ImportCustomImage(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSnapshots(args, parsed_globals):
    """Run the ECM DescribeSnapshots action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].DescribeSnapshotsRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.DescribeSnapshots(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doTerminateInstances(args, parsed_globals):
    """Run the ECM TerminateInstances action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].TerminateInstancesRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.TerminateInstances(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeInstanceVncUrl(args, parsed_globals):
    """Run the ECM DescribeInstanceVncUrl action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].DescribeInstanceVncUrlRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.DescribeInstanceVncUrl(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteRouteTable(args, parsed_globals):
    """Run the ECM DeleteRouteTable action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].DeleteRouteTableRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.DeleteRouteTable(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityGroupLimits(args, parsed_globals):
    """Run the ECM DescribeSecurityGroupLimits action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].DescribeSecurityGroupLimitsRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.DescribeSecurityGroupLimits(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSetSecurityGroupForLoadbalancers(args, parsed_globals):
    """Run the ECM SetSecurityGroupForLoadbalancers action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].SetSecurityGroupForLoadbalancersRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.SetSecurityGroupForLoadbalancers(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyIpv6AddressesAttribute(args, parsed_globals):
    """Run the ECM ModifyIpv6AddressesAttribute action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].ModifyIpv6AddressesAttributeRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.ModifyIpv6AddressesAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRebootInstances(args, parsed_globals):
    """Run the ECM RebootInstances action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].RebootInstancesRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.RebootInstances(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAssignIpv6Addresses(args, parsed_globals):
    """Run the ECM AssignIpv6Addresses action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].AssignIpv6AddressesRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.AssignIpv6Addresses(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doMigratePrivateIpAddress(args, parsed_globals):
    """Run the ECM MigratePrivateIpAddress action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].MigratePrivateIpAddressRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.MigratePrivateIpAddress(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateLoadBalancer(args, parsed_globals):
    """Run the ECM CreateLoadBalancer action and print its result.

    :param args: dict of action-specific parameters; serialized as JSON into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, output, waiter, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static secret id/key (+ optional token).
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    user_timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if user_timeout is None else int(user_timeout),  # default 60s request timeout
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].EcmClient(
        cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[g_param[OptionsDefine.Version]].CreateLoadBalancerRequest()
    model.from_json_string(json.dumps(args))

    # Poll until the waiter condition is satisfied (a single call when no waiter is configured).
    start_time = time.time()
    while True:
        rsp = client.CreateLoadBalancer(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyModuleNetwork(args, parsed_globals):
    """Call the ECM `ModifyModuleNetwork` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyModuleNetworkRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyModuleNetwork(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeHaVips(args, parsed_globals):
    """Call the ECM `DescribeHaVips` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeHaVipsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeHaVips(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAssignPrivateIpAddresses(args, parsed_globals):
    """Call the ECM `AssignPrivateIpAddresses` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AssignPrivateIpAddressesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.AssignPrivateIpAddresses(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteHaVip(args, parsed_globals):
    """Call the ECM `DeleteHaVip` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteHaVipRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteHaVip(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAddressQuota(args, parsed_globals):
    """Call the ECM `DescribeAddressQuota` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeAddressQuotaRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeAddressQuota(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteLoadBalancerListeners(args, parsed_globals):
    """Call the ECM `DeleteLoadBalancerListeners` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteLoadBalancerListenersRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteLoadBalancerListeners(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteVpc(args, parsed_globals):
    """Call the ECM `DeleteVpc` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteVpcRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteVpc(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSubnets(args, parsed_globals):
    """Call the ECM `DescribeSubnets` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSubnetsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeSubnets(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeInstances(args, parsed_globals):
    """Call the ECM `DescribeInstances` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeInstancesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeInstances(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityGroups(args, parsed_globals):
    """Call the ECM `DescribeSecurityGroups` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSecurityGroupsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeSecurityGroups(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyPrivateIpAddressesAttribute(args, parsed_globals):
    """Call the ECM `ModifyPrivateIpAddressesAttribute` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyPrivateIpAddressesAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyPrivateIpAddressesAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyModuleImage(args, parsed_globals):
    """Call the ECM `ModifyModuleImage` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyModuleImageRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyModuleImage(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doResetInstancesPassword(args, parsed_globals):
    """Call the ECM `ResetInstancesPassword` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ResetInstancesPasswordRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ResetInstancesPassword(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateModule(args, parsed_globals):
    """Call the ECM `CreateModule` API and print the formatted response.

    :param args: dict of API request parameters parsed from the CLI.
    :param parsed_globals: global CLI options (credentials, region, endpoint,
        output format, optional waiter configuration).
    :raises ClientError: in waiter mode, when the polled result does not reach
        the target value before the configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > static secret pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateModuleRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateModule(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some Python 3 versions return bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the JMESPath expression yields the target value.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEnableRoutes(args, parsed_globals):
    """Invoke the ECM EnableRoutes action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.EnableRoutesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.EnableRoutes(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeLoadBalancers(args, parsed_globals):
    """Invoke the ECM DescribeLoadBalancers action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeLoadBalancersRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeLoadBalancers(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDisks(args, parsed_globals):
    """Invoke the ECM DescribeDisks action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDisksRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeDisks(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doMigrateNetworkInterface(args, parsed_globals):
    """Invoke the ECM MigrateNetworkInterface action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.MigrateNetworkInterfaceRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.MigrateNetworkInterface(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateListener(args, parsed_globals):
    """Invoke the ECM CreateListener action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateListenerRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateListener(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSnapshots(args, parsed_globals):
    """Invoke the ECM DeleteSnapshots action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteSnapshotsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteSnapshots(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyRouteTableAttribute(args, parsed_globals):
    """Invoke the ECM ModifyRouteTableAttribute action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyRouteTableAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyRouteTableAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTargetWeight(args, parsed_globals):
    """Invoke the ECM ModifyTargetWeight action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyTargetWeightRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyTargetWeight(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyModuleSecurityGroups(args, parsed_globals):
    """Invoke the ECM ModifyModuleSecurityGroups action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyModuleSecurityGroupsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyModuleSecurityGroups(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateRoutes(args, parsed_globals):
    """Invoke the ECM CreateRoutes action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateRoutesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateRoutes(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyHaVipAttribute(args, parsed_globals):
    """Invoke the ECM ModifyHaVipAttribute action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyHaVipAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyHaVipAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateHaVip(args, parsed_globals):
    """Invoke the ECM CreateHaVip action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateHaVipRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateHaVip(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyModuleIpDirect(args, parsed_globals):
    """Invoke the ECM ModifyModuleIpDirect action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyModuleIpDirectRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyModuleIpDirect(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTargets(args, parsed_globals):
    """Invoke the ECM DescribeTargets action and print the formatted result.

    If a waiter is configured in the global parameters, the request is
    re-issued until the waiter expression evaluates to the target value
    or the waiter timeout elapses (raising ClientError).
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > STS assume-role > plain secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTargetsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeTargets(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some runtimes return bytes (python3.3); decode and retry.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter mode: poll until the configured expression matches the target.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteImage(args, parsed_globals):
    """CLI action handler: call the ECM DeleteImage API.

    args: dict of action-specific parameters, serialized into the request model.
    parsed_globals: raw global CLI options, normalized by parse_global_arg().
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential resolution priority: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless --timeout was given.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteImageRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll loop: runs once when no --waiter is configured, otherwise repeats the
    # call until the waiter expression matches its target or 'timeout' elapses.
    while True:
        rsp = client.DeleteImage(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # NOTE: 'OptionsDefine.WaiterInfo' is a literal string key, set by parse_global_arg().
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityGroupAssociationStatistics(args, parsed_globals):
    """CLI action handler: call the ECM DescribeSecurityGroupAssociationStatistics API.

    Generated boilerplate; identical flow to the sibling do* handlers:
    resolve credentials, build the client, send the request, optionally
    poll per --waiter, then format the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential priority: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSecurityGroupAssociationStatisticsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Runs once without --waiter; otherwise polls until expr matches 'to' or timeout.
    while True:
        rsp = client.DescribeSecurityGroupAssociationStatistics(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribePeakBaseOverview(args, parsed_globals):
    """CLI action handler: call the ECM DescribePeakBaseOverview API.

    Generated boilerplate; same flow as the sibling do* handlers.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential priority: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribePeakBaseOverviewRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Runs once without --waiter; otherwise polls until expr matches 'to' or timeout.
    while True:
        rsp = client.DescribePeakBaseOverview(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAddressAttribute(args, parsed_globals):
    """CLI action handler: call the ECM ModifyAddressAttribute API.

    Generated boilerplate; same flow as the sibling do* handlers.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential priority: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyAddressAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Runs once without --waiter; otherwise polls until expr matches 'to' or timeout.
    while True:
        rsp = client.ModifyAddressAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRemovePrivateIpAddresses(args, parsed_globals):
    """CLI action handler: call the ECM RemovePrivateIpAddresses API.

    Generated boilerplate; same flow as the sibling do* handlers.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential priority: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RemovePrivateIpAddressesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Runs once without --waiter; otherwise polls until expr matches 'to' or timeout.
    while True:
        rsp = client.RemovePrivateIpAddresses(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSecurityGroupPolicies(args, parsed_globals):
    """CLI action handler: call the ECM CreateSecurityGroupPolicies API.

    Generated boilerplate; same flow as the sibling do* handlers.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential priority: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateSecurityGroupPoliciesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Runs once without --waiter; otherwise polls until expr matches 'to' or timeout.
    while True:
        rsp = client.CreateSecurityGroupPolicies(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyVpcAttribute(args, parsed_globals):
    """CLI action handler: call the ECM ModifyVpcAttribute API.

    Generated boilerplate; same flow as the sibling do* handlers.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential priority: CVM role > STS assume-role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.EcmClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyVpcAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Runs once without --waiter; otherwise polls until expr matches 'to' or timeout.
    while True:
        rsp = client.ModifyVpcAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# API version string -> generated client module.
CLIENT_MAP = {
    "v20190719": ecm_client_v20190719,
}
# API version string -> generated request/response model module.
MODELS_MAP = {
    "v20190719": models_v20190719,
}
# CLI action name -> handler function defined above in this module.
ACTION_MAP = {
    "ReplaceSecurityGroupPolicy": doReplaceSecurityGroupPolicy,
    "DescribeConfig": doDescribeConfig,
    "DescribeTaskResult": doDescribeTaskResult,
    "ModifyModuleName": doModifyModuleName,
    "DescribeRouteTables": doDescribeRouteTables,
    "CreateRouteTable": doCreateRouteTable,
    "ReplaceRouteTableAssociation": doReplaceRouteTableAssociation,
    "ModifyInstancesAttribute": doModifyInstancesAttribute,
    "ResetRoutes": doResetRoutes,
    "DescribeSecurityGroupPolicies": doDescribeSecurityGroupPolicies,
    "RunInstances": doRunInstances,
    "DescribeCustomImageTask": doDescribeCustomImageTask,
    "DescribeAddresses": doDescribeAddresses,
    "DescribeDefaultSubnet": doDescribeDefaultSubnet,
    "DescribeListeners": doDescribeListeners,
    "CreateImage": doCreateImage,
    "DescribeNode": doDescribeNode,
    "ImportImage": doImportImage,
    "BatchDeregisterTargets": doBatchDeregisterTargets,
    "ModifyLoadBalancerAttributes": doModifyLoadBalancerAttributes,
    "DescribeLoadBalanceTaskStatus": doDescribeLoadBalanceTaskStatus,
    "DescribeVpcs": doDescribeVpcs,
    "BatchModifyTargetWeight": doBatchModifyTargetWeight,
    "DescribeImage": doDescribeImage,
    "DisassociateSecurityGroups": doDisassociateSecurityGroups,
    "DeleteSecurityGroup": doDeleteSecurityGroup,
    "ModifyAddressesBandwidth": doModifyAddressesBandwidth,
    "CreateSubnet": doCreateSubnet,
    "ResetInstancesMaxBandwidth": doResetInstancesMaxBandwidth,
    "DescribeModule": doDescribeModule,
    "StartInstances": doStartInstances,
    "DeleteListener": doDeleteListener,
    "DisassociateAddress": doDisassociateAddress,
    "CreateVpc": doCreateVpc,
    "AttachDisks": doAttachDisks,
    "AllocateAddresses": doAllocateAddresses,
    "ModifyModuleDisableWanIp": doModifyModuleDisableWanIp,
    "ModifyListener": doModifyListener,
    "DisassociateInstancesKeyPairs": doDisassociateInstancesKeyPairs,
    "AssociateAddress": doAssociateAddress,
    "ModifyDefaultSubnet": doModifyDefaultSubnet,
    "DeleteSubnet": doDeleteSubnet,
    "ModifySecurityGroupAttribute": doModifySecurityGroupAttribute,
    "BatchRegisterTargets": doBatchRegisterTargets,
    "DescribeInstancesDeniedActions": doDescribeInstancesDeniedActions,
    "CreateNetworkInterface": doCreateNetworkInterface,
    "StopInstances": doStopInstances,
    "DescribeImportImageOs": doDescribeImportImageOs,
    "DescribeMonthPeakNetwork": doDescribeMonthPeakNetwork,
    "DetachNetworkInterface": doDetachNetworkInterface,
    "ResetInstances": doResetInstances,
    "TerminateDisks": doTerminateDisks,
    "ModifyTargetPort": doModifyTargetPort,
    "DeleteNetworkInterface": doDeleteNetworkInterface,
    "DeleteModule": doDeleteModule,
    "ReleaseIpv6Addresses": doReleaseIpv6Addresses,
    "DescribeTargetHealth": doDescribeTargetHealth,
    "ReplaceRoutes": doReplaceRoutes,
    "DescribeRouteConflicts": doDescribeRouteConflicts,
    "DetachDisks": doDetachDisks,
    "DisableRoutes": doDisableRoutes,
    "DescribePriceRunInstance": doDescribePriceRunInstance,
    "DescribeBaseOverview": doDescribeBaseOverview,
    "AttachNetworkInterface": doAttachNetworkInterface,
    "ReleaseAddresses": doReleaseAddresses,
    "DeleteSecurityGroupPolicies": doDeleteSecurityGroupPolicies,
    "CreateKeyPair": doCreateKeyPair,
    "ModifyModuleConfig": doModifyModuleConfig,
    "SetLoadBalancerSecurityGroups": doSetLoadBalancerSecurityGroups,
    "DescribeModuleDetail": doDescribeModuleDetail,
    "DeleteLoadBalancer": doDeleteLoadBalancer,
    "DescribeInstanceTypeConfig": doDescribeInstanceTypeConfig,
    "DeleteRoutes": doDeleteRoutes,
    "ModifySecurityGroupPolicies": doModifySecurityGroupPolicies,
    "ModifySubnetAttribute": doModifySubnetAttribute,
    "DescribePackingQuotaGroup": doDescribePackingQuotaGroup,
    "DescribeNetworkInterfaces": doDescribeNetworkInterfaces,
    "CreateDisks": doCreateDisks,
    "AssociateSecurityGroups": doAssociateSecurityGroups,
    "ModifyImageAttribute": doModifyImageAttribute,
    "DescribeTaskStatus": doDescribeTaskStatus,
    "DescribePeakNetworkOverview": doDescribePeakNetworkOverview,
    "CreateSecurityGroup": doCreateSecurityGroup,
    "ImportCustomImage": doImportCustomImage,
    "DescribeSnapshots": doDescribeSnapshots,
    "TerminateInstances": doTerminateInstances,
    "DescribeInstanceVncUrl": doDescribeInstanceVncUrl,
    "DeleteRouteTable": doDeleteRouteTable,
    "DescribeSecurityGroupLimits": doDescribeSecurityGroupLimits,
    "SetSecurityGroupForLoadbalancers": doSetSecurityGroupForLoadbalancers,
    "ModifyIpv6AddressesAttribute": doModifyIpv6AddressesAttribute,
    "RebootInstances": doRebootInstances,
    "AssignIpv6Addresses": doAssignIpv6Addresses,
    "MigratePrivateIpAddress": doMigratePrivateIpAddress,
    "CreateLoadBalancer": doCreateLoadBalancer,
    "ModifyModuleNetwork": doModifyModuleNetwork,
    "DescribeHaVips": doDescribeHaVips,
    "AssignPrivateIpAddresses": doAssignPrivateIpAddresses,
    "DeleteHaVip": doDeleteHaVip,
    "DescribeAddressQuota": doDescribeAddressQuota,
    "DeleteLoadBalancerListeners": doDeleteLoadBalancerListeners,
    "DeleteVpc": doDeleteVpc,
    "DescribeSubnets": doDescribeSubnets,
    "DescribeInstances": doDescribeInstances,
    "DescribeSecurityGroups": doDescribeSecurityGroups,
    "ModifyPrivateIpAddressesAttribute": doModifyPrivateIpAddressesAttribute,
    "ModifyModuleImage": doModifyModuleImage,
    "ResetInstancesPassword": doResetInstancesPassword,
    "CreateModule": doCreateModule,
    "EnableRoutes": doEnableRoutes,
    "DescribeLoadBalancers": doDescribeLoadBalancers,
    "DescribeDisks": doDescribeDisks,
    "MigrateNetworkInterface": doMigrateNetworkInterface,
    "CreateListener": doCreateListener,
    "DeleteSnapshots": doDeleteSnapshots,
    "ModifyRouteTableAttribute": doModifyRouteTableAttribute,
    "ModifyTargetWeight": doModifyTargetWeight,
    "ModifyModuleSecurityGroups": doModifyModuleSecurityGroups,
    "CreateRoutes": doCreateRoutes,
    "ModifyHaVipAttribute": doModifyHaVipAttribute,
    "CreateHaVip": doCreateHaVip,
    "ModifyModuleIpDirect": doModifyModuleIpDirect,
    "DescribeTargets": doDescribeTargets,
    "DeleteImage": doDeleteImage,
    "DescribeSecurityGroupAssociationStatistics": doDescribeSecurityGroupAssociationStatistics,
    "DescribePeakBaseOverview": doDescribePeakBaseOverview,
    "ModifyAddressAttribute": doModifyAddressAttribute,
    "RemovePrivateIpAddresses": doRemovePrivateIpAddresses,
    "CreateSecurityGroupPolicies": doCreateSecurityGroupPolicies,
    "ModifyVpcAttribute": doModifyVpcAttribute,
}
# API versions this CLI build accepts; validated in parse_global_arg().
AVAILABLE_VERSION_LIST = [
    "v20190719",
]
def action_caller():
    """Expose the action-name -> handler mapping to the CLI dispatcher."""
    dispatch_table = ACTION_MAP
    return dispatch_table
def parse_global_arg(parsed_globals):
    """Normalize global CLI options into a fully-populated parameter dict.

    Merges, in priority order: explicit command-line values, environment
    variables (only honoured when no --profile was given), and the profile
    files ~/.tccli/<profile>.configure and <profile>.credential.

    Returns the mutated parsed_globals dict with credentials, region,
    output format, service version, endpoint and (optionally) waiter
    info resolved.  Raises ConfigurationError on malformed profile files
    or missing mandatory values.
    """
    g_param = parsed_globals
    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    # Environment variables only apply when the user did not name a profile.
    if not is_exist_profile:
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
        if os.environ.get(OptionsDefine.ENV_ROLE_ARN) and os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME):
            cred[OptionsDefine.RoleArn] = os.environ.get(OptionsDefine.ENV_ROLE_ARN)
            cred[OptionsDefine.RoleSessionName] = os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME)
    # Back-fill any option still None from the credential/configure files.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                elif not g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param.replace('_', '-') in [OptionsDefine.RoleArn, OptionsDefine.RoleSessionName]:
                # Option keys use '_' while the credential file uses '-'.
                if param.replace('_', '-') in cred:
                    g_param[param] = cred[param.replace('_', '-')]
    try:
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["ecm"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["ecm"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    if g_param[OptionsDefine.Waiter]:
        # SECURITY NOTE: eval() on the user-supplied --waiter string executes
        # arbitrary code.  Tolerable only because the value comes from the
        # local CLI user; ast.literal_eval would be the safer choice.
        param = eval(g_param[OptionsDefine.Waiter])
        if 'expr' not in param:
            raise Exception('`expr` in `--waiter` must be defined')
        if 'to' not in param:
            raise Exception('`to` in `--waiter` must be defined')
        if 'timeout' not in param:
            if 'waiter' in conf and 'timeout' in conf['waiter']:
                param['timeout'] = conf['waiter']['timeout']
            else:
                param['timeout'] = 180  # default waiter timeout, seconds
        if 'interval' not in param:
            if 'waiter' in conf and 'interval' in conf['waiter']:
                param['interval'] = conf['waiter']['interval']
            else:
                # BUGFIX: this branch previously did param['timeout'] = 5,
                # clobbering the timeout and leaving 'interval' unset so the
                # min() below raised KeyError.  Default interval is 5 seconds.
                param['interval'] = 5
        param['interval'] = min(param['interval'], param['timeout'])
        g_param['OptionsDefine.WaiterInfo'] = param
    # Under python2, json.load returns unicode values from the config files,
    # so convert them to byte strings for downstream consumers.
    if six.PY2:
        for key, value in g_param.items():
            if isinstance(value, six.text_type):
                g_param[key] = value.encode('utf-8')
    return g_param
| 53.479423
| 155
| 0.680386
| 37,288
| 348,258
| 6.128513
| 0.015072
| 0.099641
| 0.303558
| 0.132107
| 0.928286
| 0.927144
| 0.926593
| 0.925744
| 0.92472
| 0.923845
| 0
| 0.003997
| 0.184648
| 348,258
| 6,511
| 156
| 53.487636
| 0.800784
| 0.003965
| 0
| 0.833662
| 0
| 0
| 0.140632
| 0.075927
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021675
| false
| 0.001314
| 0.004598
| 0.000164
| 0.026601
| 0.021346
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b45f4fceedfea9f838857d182d5de9a9223f78e1
| 39,713
|
py
|
Python
|
tests/systemcalc_test.py
|
stevepbyrne/dbus-systemcalc-py
|
4d50ca36af51bbe1e3040cb63f60ef262da5d397
|
[
"MIT"
] | null | null | null |
tests/systemcalc_test.py
|
stevepbyrne/dbus-systemcalc-py
|
4d50ca36af51bbe1e3040cb63f60ef262da5d397
|
[
"MIT"
] | null | null | null |
tests/systemcalc_test.py
|
stevepbyrne/dbus-systemcalc-py
|
4d50ca36af51bbe1e3040cb63f60ef262da5d397
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import json
import unittest
# This adapts sys.path to include all relevant packages
import context
# our own packages
from base import TestSystemCalcBase
# Monkey patching for unit tests
import patches
class TestSystemCalc(TestSystemCalcBase):
def __init__(self, methodName='runTest'):
    # No state of our own; delegate straight to the shared test base class.
    TestSystemCalcBase.__init__(self, methodName)
def setUp(self):
    """Common fixture: a single-phase Multi (vebus) device on AC-in 1 plus a
    settings service mapping AC input 1 -> type 1 and input 2 -> type 2."""
    TestSystemCalcBase.setUp(self)
    self._add_device('com.victronenergy.vebus.ttyO1',
        product_name='Multi',
        values={
            '/Ac/ActiveIn/L1/P': 123,
            '/Ac/ActiveIn/ActiveInput': 0,
            '/Ac/ActiveIn/Connected': 1,
            '/Ac/Out/L1/P': 100,
            '/Dc/0/Voltage': 12.25,
            '/Dc/0/Current': -8,
            '/DeviceInstance': 0,
            '/Devices/0/Assistants': [0x55, 0x1] + (26 * [0]),  # Hub-4 assistant
            '/Dc/0/MaxChargeCurrent': None,
            '/Soc': 53.2,
            '/State': 3,
            '/BatteryOperationalLimits/MaxChargeVoltage': None,
            '/BatteryOperationalLimits/MaxChargeCurrent': None,
            '/BatteryOperationalLimits/MaxDischargeCurrent': None,
            '/BatteryOperationalLimits/BatteryLowVoltage': None
        })
    self._add_device('com.victronenergy.settings',
        values={
            '/Settings/SystemSetup/AcInput1': 1,
            '/Settings/SystemSetup/AcInput2': 2,
        })
def test_ac_in_grid(self):
    # With ActiveInput 0 and AcInput1 configured as 1 (grid, per the test
    # name), input power is reported under /Ac/Grid and the Multi's AC-out
    # load is counted as consumption on output only.
    self._update_values()
    self._check_values({
        '/Ac/ActiveIn/Source': 1,
        '/Ac/Grid/L1/Power': 123,
        '/Ac/Grid/L2/Power': None,
        '/Ac/Grid/L3/Power': None,
        '/Ac/Genset/NumberOfPhases': None,
        '/Ac/Consumption/L1/Power': 100,
        '/Ac/Consumption/L2/Power': None,
        '/Ac/Consumption/L3/Power': None,
        '/Ac/ConsumptionOnOutput/L1/Power': 100,
        '/Ac/ConsumptionOnOutput/L2/Power': None,
        '/Ac/ConsumptionOnOutput/L3/Power': None,
        '/Ac/ConsumptionOnInput/L1/Power': 0,
        '/Ac/ConsumptionOnInput/L2/Power': None,
        '/Ac/ConsumptionOnInput/L3/Power': None
    })
def test_ac_in_genset(self):
    # Switching the Multi to ActiveInput 1 (AcInput2, configured as type 2)
    # moves input power from /Ac/Grid to /Ac/Genset.
    self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/ActiveIn/ActiveInput', 1)
    self._update_values()
    self._check_values({
        '/Ac/ActiveIn/Source': 2,
        '/Ac/Genset/L1/Power': 123,
        '/Ac/Grid/L1/Power': None
    })
def test_ac_in_not_available(self):
    # AcInput1 set to 0 (not available): no grid or genset totals published.
    self._monitor.set_value('com.victronenergy.settings', '/Settings/SystemSetup/AcInput1', 0)
    self._update_values()
    self._check_values({
        '/Ac/ActiveIn/Source': 0,
        '/Ac/Grid/NumberOfPhases': None,
        '/Ac/Genset/NumberOfPhases': None
    })
def test_ac_in_shore(self):
    # AcInput1 set to 3 (shore, per the test name): source becomes 3 but the
    # power still counts toward the grid phase totals.
    self._monitor.set_value('com.victronenergy.settings', '/Settings/SystemSetup/AcInput1', 3)
    self._update_values()
    self._check_values({
        '/Ac/ActiveIn/Source': 3,
        '/Ac/Grid/NumberOfPhases': 1,
        '/Ac/Genset/NumberOfPhases': None
    })
def test_ac_in_grid_3p(self):
    # Three-phase Multi: per-phase input power maps to /Ac/Grid/Lx and
    # per-phase AC-out load to consumption-on-output.
    self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/ActiveIn/L1/P', 100)
    self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/ActiveIn/L2/P', 150)
    self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/ActiveIn/L3/P', 200)
    self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L1/P', 80)
    self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L2/P', 90)
    self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L3/P', 100)
    self._update_values()
    self._check_values({
        '/Ac/ActiveIn/Source': 1,
        '/Ac/Grid/L1/Power': 100,
        '/Ac/Grid/L2/Power': 150,
        '/Ac/Grid/L3/Power': 200,
        '/Ac/Grid/NumberOfPhases': 3,
        '/Ac/Genset/L1/Power': None,
        '/Ac/Genset/NumberOfPhases': None,
        '/Ac/Consumption/L1/Power': 80,
        '/Ac/Consumption/L2/Power': 90,
        '/Ac/Consumption/L3/Power': 100,
        '/Ac/ConsumptionOnOutput/L1/Power': 80,
        '/Ac/ConsumptionOnOutput/L2/Power': 90,
        '/Ac/ConsumptionOnOutput/L3/Power': 100,
        '/Ac/ConsumptionOnInput/L1/Power': 0,
        '/Ac/ConsumptionOnInput/L2/Power': 0,
        '/Ac/ConsumptionOnInput/L3/Power': 0
    })
def test_ac_gridmeter(self):
    # With a dedicated grid meter and a grid-side PV inverter (Position 0),
    # input-side consumption is grid import minus Multi input plus PV output.
    self._add_device('com.victronenergy.grid.ttyUSB1', {'/Ac/L1/Power': 1230})
    self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
        '/Ac/L1/Power': 500,
        '/Position': 0
    })
    self._update_values()
    self._check_values({
        '/Ac/Grid/L1/Power': 1230,
        '/Ac/Grid/NumberOfPhases': 1,
        '/Ac/Consumption/L1/Power': 1230 - 123 + 100 + 500,
        '/Ac/ConsumptionOnOutput/L1/Power': 100,
        '/Ac/ConsumptionOnInput/L1/Power': 1230 - 123 + 500
    })
def test_ac_gridmeter_3p(self):
    # Three-phase grid meter and PV inverter; the Multi itself stays
    # single-phase, so output-side consumption only exists on L1.
    self._add_device('com.victronenergy.grid.ttyUSB1', {
        '/Ac/L1/Power': 1230,
        '/Ac/L2/Power': 1130,
        '/Ac/L3/Power': 1030})
    self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
        '/Ac/L1/Power': 500,
        '/Ac/L2/Power': 400,
        '/Ac/L3/Power': 200,
        '/Position': 0
    })
    self._update_values()
    self._check_values({
        '/Ac/Grid/L1/Power': 1230,
        '/Ac/Grid/L2/Power': 1130,
        '/Ac/Grid/L3/Power': 1030,
        '/Ac/Grid/NumberOfPhases': 3,
        '/Ac/Consumption/L1/Power': 1230 - 123 + 100 + 500,
        '/Ac/Consumption/L2/Power': 1130 + 400,
        '/Ac/Consumption/L3/Power': 1030 + 200,
        '/Ac/ConsumptionOnInput/L1/Power': 1230 - 123 + 500,
        '/Ac/ConsumptionOnInput/L2/Power': 1130 + 400,
        '/Ac/ConsumptionOnInput/L3/Power': 1030 + 200,
        '/Ac/ConsumptionOnOutput/L1/Power': 100,
        # It's one phase on output
        '/Ac/ConsumptionOnOutput/NumberOfPhases': 1,
        '/Ac/ConsumptionOnOutput/L2/Power': None,
        '/Ac/ConsumptionOnOutput/L3/Power': None
    })
def test_ac_gridmeter_3p_ignore_acout(self):
# With HasAcOutSystem=0 and an ESS assistant (id 5) the AC-out powers are
# ignored: all ConsumptionOnOutput paths must be invalid (None).
self._set_setting('/Settings/SystemSetup/HasAcOutSystem', 0)
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Hub4/AssistantId', 5)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L1/P', 20)
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L2/P', -10)
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L3/P', 30)
self._add_device('com.victronenergy.grid.ttyUSB1', {
'/Ac/L1/Power': 1230,
'/Ac/L2/Power': 1130,
'/Ac/L3/Power': 1030})
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Ac/L2/Power': 400,
'/Ac/L3/Power': 200,
'/Position': 0
})
self._update_values()
self._check_values({
'/Ac/Grid/L1/Power': 1230,
'/Ac/Grid/L2/Power': 1130,
'/Ac/Grid/L3/Power': 1030,
'/Ac/Grid/NumberOfPhases': 3,
'/Ac/Consumption/L1/Power': 1230 - 123 + 500,
'/Ac/Consumption/L2/Power': 1130 + 400,
'/Ac/Consumption/L3/Power': 1030 + 200,
'/Ac/ConsumptionOnInput/L1/Power': 1230 - 123 + 500,
'/Ac/ConsumptionOnInput/L2/Power': 1130 + 400,
'/Ac/ConsumptionOnInput/L3/Power': 1030 + 200,
'/Ac/ConsumptionOnOutput/NumberOfPhases': None,
'/Ac/ConsumptionOnOutput/L1/Power': None,
'/Ac/ConsumptionOnOutput/L2/Power': None,
'/Ac/ConsumptionOnOutput/L3/Power': None
})
def test_ac_gridmeter_3p_has_acout_notset(self):
# HasAcOutSystem=0 but no ESS assistant: AC-out is still counted; negative
# out-power (-10 on L2) is clamped to 0 in ConsumptionOnOutput.
self._set_setting('/Settings/SystemSetup/HasAcOutSystem', 0)
self._monitor.add_value('com.victronenergy.settings', '/Settings/CGwacs/RunWithoutGridMeter', 1)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L1/P', 20)
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L2/P', -10)
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L3/P', 30)
self._add_device('com.victronenergy.grid.ttyUSB1', {
'/Ac/L1/Power': 1230,
'/Ac/L2/Power': 1130,
'/Ac/L3/Power': 1030})
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Ac/L2/Power': 400,
'/Ac/L3/Power': 200,
'/Position': 0
})
self._update_values()
self._check_values({
'/Ac/Grid/L1/Power': 1230,
'/Ac/Grid/L2/Power': 1130,
'/Ac/Grid/L3/Power': 1030,
'/Ac/Grid/NumberOfPhases': 3,
'/Ac/Consumption/L1/Power': 1230 - 123 + 500 + 20,
'/Ac/Consumption/L2/Power': 1130 + 400,
'/Ac/Consumption/L3/Power': 1030 + 200 + 30,
'/Ac/ConsumptionOnInput/L1/Power': 1230 - 123 + 500,
'/Ac/ConsumptionOnInput/L2/Power': 1130 + 400,
'/Ac/ConsumptionOnInput/L3/Power': 1030 + 200,
'/Ac/ConsumptionOnOutput/NumberOfPhases': 3,
'/Ac/ConsumptionOnOutput/L1/Power': 20,
'/Ac/ConsumptionOnOutput/L2/Power': 0,
'/Ac/ConsumptionOnOutput/L3/Power': 30
})
def test_ac_gridmeter_inactive(self):
# Multi's active input switched to input 1 (genset), so the multi's AC-in
# power is no longer subtracted from the grid-meter reading.
self._add_device('com.victronenergy.grid.ttyUSB1', {'/Ac/L1/Power': 1230})
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Position': 0
})
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/ActiveIn/ActiveInput', 1)
self._update_values()
self._check_values({
'/Ac/ActiveIn/Source': 2,
'/Ac/Grid/L1/Power': 1230,
'/Ac/Grid/NumberOfPhases': 1,
'/Ac/Consumption/L1/Power': 1230 + 100 + 500,
'/Ac/ConsumptionOnInput/L1/Power': 1230 + 500,
'/Ac/ConsumptionOnOutput/L1/Power': 100,
'/Ac/PvOnGrid/L1/Power': 500
})
def test_pv_on_output(self):
# PV inverter on AC-out (position 1): its power counts towards output
# consumption together with the multi's (negative) AC-out power.
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Position': 1
})
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L1/P', -100)
self._update_values()
self._check_values({
'/Ac/ActiveIn/Source': 1,
'/Ac/Grid/L1/Power': 123,
'/Ac/Grid/NumberOfPhases': 1,
'/Ac/Consumption/L1/Power': 500 - 100,
'/Ac/ConsumptionOnInput/L1/Power': 0,
'/Ac/ConsumptionOnOutput/L1/Power': 500 - 100,
'/Ac/PvOnOutput/L1/Power': 500
})
def test_multiple_pv(self):
# Several PV inverters spread over phases and positions: per-phase totals
# are aggregated separately for PvOnOutput and PvOnGrid.
self._add_device('com.victronenergy.pvinverter.fronius_122_2313', {
'/Ac/L2/Power': 200,
'/Position': 1
})
self._add_device('com.victronenergy.pvinverter.fronius_122_2314', {
'/Ac/L1/Power': 105,
'/Position': 1
})
self._add_device('com.victronenergy.pvinverter.fronius_122_2315', {
'/Ac/L3/Power': 300,
'/Position': 1
})
self._add_device('com.victronenergy.pvinverter.fronius_122_2316', {
'/Ac/L1/Power': 110,
'/Ac/L3/Power': 200,
'/Position': 1
})
self._add_device('com.victronenergy.pvinverter.fronius_122_2317', {
'/Ac/L1/Power': 120,
'/Ac/L2/Power': 220,
'/Position': 0
})
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L1/P', -100)
self._update_values()
self._check_values({
'/Ac/ActiveIn/Source': 1,
'/Ac/Grid/L1/Power': 123 - 120,
'/Ac/Grid/L2/Power': -220,
'/Ac/Grid/L3/Power': None,
'/Ac/Grid/NumberOfPhases': 2,
'/Ac/Consumption/L1/Power': 105 + 110 - 100,
# No grid meter, so assume there are no loads on the AC input
'/Ac/ConsumptionOnInput/L1/Power': 0,
'/Ac/ConsumptionOnOutput/L1/Power': 105 + 110 + -100,
'/Ac/PvOnOutput/NumberOfPhases': 3,
'/Ac/PvOnOutput/L1/Power': 105 + 110,
'/Ac/PvOnGrid/L1/Power': 120,
'/Ac/PvOnGrid/L2/Power': 220,
'/Ac/PvOnGrid/L3/Power': None,
'/Ac/PvOnGrid/NumberOfPhases': 2
})
def test_pv_on_input_invalid(self):
# PV inverter at position 2 (genset) while the active input is the grid:
# the PV lands on /Ac/PvOnGenset and consumption stays zero.
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Position': 2
})
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L1/P', -500)
self._update_values()
self._check_values({
'/Ac/ActiveIn/Source': 1,
'/Ac/Grid/L1/Power': 123,
'/Ac/Grid/NumberOfPhases': 1,
'/Ac/Consumption/L1/Power': 0,
'/Ac/ConsumptionOnInput/L1/Power': 0,
'/Ac/ConsumptionOnOutput/L1/Power': 0,
'/Ac/PvOnGenset/L1/Power': 500
})
def test_solar_charger_no_load_output(self):
# DC PV power is the sum of V*I over all solar chargers (no load output).
self._add_device('com.victronenergy.solarcharger.ttyO1', {
'/Dc/0/Voltage': 12,
'/Dc/0/Current': 8,
})
self._update_values()
self._check_values({
'/Dc/System/Power': None,
'/Dc/Pv/Power': 12 * 8})
# A second charger simply adds to the total.
self._add_device('com.victronenergy.solarcharger.ttyO2', {
'/Dc/0/Voltage': 12.5,
'/Dc/0/Current': 10,
})
self._update_values()
self._check_values({
'/Dc/System/Power': None,
'/Dc/Pv/Power': (12 * 8) + (12.5 * 10)})
def test_solar_charger_with_load_output(self):
# Chargers with a load output (/Load/I): PV power includes the load
# current; with HasDcSystem=1 the load shows up as DC system power.
self._add_device('com.victronenergy.solarcharger.ttyO1', {
'/Dc/0/Voltage': 12,
'/Dc/0/Current': 8,
'/Load/I': 5
})
self._update_values()
self._check_values({
'/Dc/System/Power': None,
'/Dc/Pv/Power': 12 * (8 + 5)})
self._add_device('com.victronenergy.solarcharger.ttyO2', {
'/Dc/0/Voltage': 12.5,
'/Dc/0/Current': 10,
'/Load/I': 5
})
self._update_values()
self._check_values({
'/Dc/System/Power': None,
'/Dc/Pv/Power': 12 * (8 + 5) + 12.5 * (10 + 5)})
self._set_setting('/Settings/SystemSetup/HasDcSystem', 1)
self._update_values()
self._check_values({
'/Dc/System/Power': 12 * 5 + 12.5 * 5,
'/Dc/Pv/Power': 12 * (8 + 5) + 12.5 * (10 + 5)})
def test_rs_smart_pv(self):
# An inverter publishing /Yield/Power (RS Smart style) contributes that
# yield to DC PV power alongside solar chargers.
self._add_device('com.victronenergy.solarcharger.ttyO1', {
'/Dc/0/Voltage': 12,
'/Dc/0/Current': 8,
})
self._add_device('com.victronenergy.inverter.ttyO1',
product_name='inverter',
values={
'/Dc/0/Voltage': 12.8,
'/Ac/Out/L1/V': 220,
'/Ac/Out/L1/I': 1.0,
'/Yield/Power': 102
})
self._update_values()
self._check_values({
'/Dc/Pv/Power': 102 + 12 * 8})
def test_multi_dc_power(self):
# No /Dc/0/Power published by the vebus: power is derived as V * I.
self._update_values()
self._check_values({
'/Dc/Vebus/Current': -8,
'/Dc/Vebus/Power': -8 * 12.25})
def test_multi_dc_power_2(self):
# When the vebus publishes /Dc/0/Power it is used verbatim, not V * I.
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Power', -98.7)
self._update_values()
self._check_values({
'/Dc/Vebus/Current': -8,
'/Dc/Vebus/Power': -98.7})
def test_multi_dc_power_3(self):
# All DC values invalid: current and power must both be None.
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Power', None)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Current', None)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Voltage', None)
self._update_values()
self._check_values({
'/Dc/Vebus/Current': None,
'/Dc/Vebus/Power': None})
def test_multi_dc_power_4(self):
# Current valid but voltage/power invalid: current passes through, power
# cannot be computed and stays None.
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Power', None)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Current', 6.5)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Voltage', None)
self._update_values()
self._check_values({
'/Dc/Vebus/Current': 6.5,
'/Dc/Vebus/Power': None})
def test_dc_charger_battery(self):
# Solar charger + battery monitor with HasDcSystem=1: DC system power is
# the balance of charger output, battery power, vebus draw and load output.
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Load/I': 5,
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._add_device('com.victronenergy.battery.ttyO2',
product_name='battery',
values={
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 5.6,
'/Dc/0/Power': 120})
self._set_setting('/Settings/SystemSetup/HasDcSystem', 1)
self._update_values()
self._check_values({
'/Dc/System/Power': 12.4 * 9.7 - 120 - 12.25 * 8 + 12.4 * 5,
'/Dc/Battery/Power': 120,
'/Dc/Pv/Power': 12.4 * (9.7 + 5)})
def test_hub1(self):
# VE.Bus multi with /Hub/ChargeVoltage present plus solar chargers
# classifies the system as Hub-1.
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Mgmt/Connection', "VE.Bus")
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Hub/ChargeVoltage', 0)
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Load/I': 5,
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._add_device('com.victronenergy.solarcharger.ttyO2',
product_name='solarcharger',
values={
'/Load/I': 5,
'/Dc/0/Voltage': 12.3,
'/Dc/0/Current': 5.6})
self._update_values()
self._check_values({
'/Hub': 1,
'/SystemType': 'Hub-1',
'/Dc/Pv/Power': 12.4 * (9.7 + 5) + 12.3 * (5.6 + 5)})
def test_hub1_vecan(self):
# A VE.Can-connected multi with a solar charger also counts as Hub-1,
# even without /Hub/ChargeVoltage.
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Mgmt/Connection', "VE.Can")
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._update_values()
self._check_values({
'/Hub': 1,
'/SystemType': 'Hub-1',
'/Dc/Pv/Power': 12.4 * 9.7})
def test_hub2(self):
# PV inverter on AC-out (position 1) classifies the system as Hub-2.
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Position': 1
})
self._update_values()
self._check_values({
'/Hub': 2,
'/SystemType': 'Hub-2',
'/Ac/PvOnOutput/L1/Power': 500})
def test_hub3_grid(self):
# PV inverter on the grid input (position 0) -> Hub-3; its production is
# subtracted from the estimated grid power.
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Position': 0
})
self._update_values()
self._check_values({
'/Hub': 3,
'/SystemType': 'Hub-3',
'/Ac/PvOnGrid/L1/Power': 500,
'/Ac/Grid/L1/Power': 123 - 500,
'/Ac/Genset/L1/Power': None})
def test_hub3_genset(self):
# PV inverter on the genset input (position 2) -> Hub-3; genset power
# goes negative by the PV production.
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Position': 2
})
self._update_values()
self._check_values({
'/Hub': 3,
'/SystemType': 'Hub-3',
'/Ac/PvOnGenset/L1/Power': 500,
'/Ac/Grid/L1/Power': 123,
'/Ac/Genset/L1/Power': -500})
def test_hub4_pv(self):
# Hub-4 assistant (id 3) present -> system type Hub-4 regardless of where
# the PV inverter sits.
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Hub4/AssistantId', 3)
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Position': 2
})
self._update_values()
self._check_values({
'/Hub': 4,
'/SystemType': 'Hub-4',
'/Ac/PvOnGenset/L1/Power': 500})
def test_ess_pv(self):
# ESS assistant (id 5) -> hub level 4 but system type string 'ESS'.
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Hub4/AssistantId', 5)
self._update_values()
self._check_values({
'/Hub': 4,
'/SystemType': 'ESS'})
def test_hub4_missing_pv(self):
# Hub-4 feeding back to the grid with no PV inverter visible: consumption
# is derived from grid meter minus the multi's AC-in (-300 - -500 = 200).
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L1/P', -500)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/ActiveIn/L1/P', -500)
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Hub4/AssistantId', 3)
self._add_device('com.victronenergy.grid.ttyUSB1', {'/Ac/L1/Power': -300})
self._update_values()
self._check_values({
'/Hub': 4,
'/SystemType': 'Hub-4',
'/Ac/Consumption/L1/Power': 200,
'/Ac/ConsumptionOnInput/L1/Power': 200,
'/Ac/ConsumptionOnOutput/L1/Power': 0
})
def test_hub4_charger(self):
# Hub-4 assistant with a solar charger: DC PV power computed as usual.
self._add_device('com.victronenergy.solarcharger.ttyO1', {
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7
})
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Hub4/AssistantId', 3)
self._update_values()
self._check_values({
'/Hub': 4,
'/SystemType': 'Hub-4',
'/Dc/Pv/Power': 12.4 * 9.7})
def test_serial(self):
# /Serial must be a 12-character hexadecimal string.
self._update_values()
s = self._service['/Serial']
self.assertEqual(len(s), 12)
# Check if 's' is a hex string, if not an exception should be raised, causing the test to fail.
self.assertIsNotNone(int(s, 16))
def test_dc_current_from_power(self):
# Battery voltage of 0 makes the derived current undefined (None) while
# power is still reported as 0.
self._update_values()
self._set_setting('/Settings/SystemSetup/BatteryService', 'nobattery')
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Voltage', 0)
self._update_values()
self._check_values({
'/Dc/Battery/Current': None,
'/Dc/Battery/Voltage': 0,
'/Dc/Battery/Power': 0})
def test_battery_selection(self):
# Explicitly selecting the vebus as battery service makes its SOC/V/I
# the system battery values.
self._update_values()
self._set_setting('/Settings/SystemSetup/BatteryService', 'com.victronenergy.vebus/0')
self._update_values()
self._check_values({
'/Dc/Battery/Soc': 53.2,
'/Dc/Battery/Current': -8,
'/Dc/Battery/Power': -8 * 12.25,
'/Dc/Battery/Voltage': 12.25,
'/ActiveBatteryService': 'com.victronenergy.vebus/0'})
def test_battery_selection_default(self):
# With the default ('automatic') setting the vebus is auto-selected.
self._update_values()
self._check_values({
'/Dc/Battery/Soc': 53.2,
'/Dc/Battery/Current': -8,
'/Dc/Battery/Power': -8 * 12.25,
'/Dc/Battery/Voltage': 12.25,
'/ActiveBatteryService': 'com.victronenergy.vebus/0'})
def test_battery_selection_solarcharger(self):
# Solar chargers present but no battery monitor: no service is selected;
# battery power/current are estimated from charger output minus vebus draw.
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._add_device('com.victronenergy.solarcharger.ttyO2',
product_name='solarcharger',
values={
'/Load/I': 5,
'/Dc/0/Voltage': 12.5,
'/Dc/0/Current': 10})
self._update_values()
self._check_values({
'/Dc/Battery/Soc': None,
'/Dc/Battery/Current': (12.4 * 9.7 + 12.5 * 10 - 12.25 * 8) / 12.25,
'/Dc/Battery/Power': 12.4 * 9.7 + 12.5 * 10 - 12.25 * 8,
'/Dc/Battery/Voltage': 12.25,
'/ActiveBatteryService': None})
def test_battery_selection_solarcharger_no_vebus_voltage(self):
# Vebus voltage invalid: the solar charger's voltage is used instead for
# the battery estimate.
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Voltage', None)
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._update_values()
self._check_values({
'/Dc/Battery/Soc': None,
'/Dc/Battery/Current': 9.7 * 12.4 / 12.4,
'/Dc/Battery/Power': 9.7 * 12.4,
'/Dc/Battery/Voltage': 12.4,
'/ActiveBatteryService': None})
def test_battery_selection_solarcharger_no_voltage(self):
# Charger publishes no usable values: fall back to the vebus DC readings.
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Dc/0/Voltage': None,
'/Dc/0/Current': None})
self._update_values()
self._check_values({
'/Dc/Battery/Soc': None,
'/Dc/Battery/Current': -8,
'/Dc/Battery/Power': - 12.25 * 8,
'/Dc/Battery/Voltage': 12.25,
'/ActiveBatteryService': None})
def test_battery_selection_inverter(self):
# Battery voltage source preference: vebus first; when the vebus is
# removed, the VE.Direct inverter supplies the voltage and AC-out load.
self._set_setting('/Settings/SystemSetup/BatteryService', 'nobattery')
self._add_device('com.victronenergy.inverter.ttyO1',
product_name='inverter',
values={
'/Dc/0/Voltage': 12.8,
'/Ac/Out/L1/V': 230,
'/Ac/Out/L1/I': 1.0 })
self._update_values()
# The vebus is still preferred
self._check_values({
'/Dc/Battery/Voltage': 12.25,
'/Dc/Battery/VoltageService': 'com.victronenergy.vebus.ttyO1'})
# ... but if the vebus is not suitable... use the first vedirect inverter
self._remove_device('com.victronenergy.vebus.ttyO1')
self._update_values()
self._check_values({
'/Dc/Battery/Voltage': 12.8,
'/Dc/Battery/VoltageService': 'com.victronenergy.inverter.ttyO1',
'/Ac/Consumption/L1/Power': 230,
'/Ac/ConsumptionOnOutput/L1/Power': 230,
'/Ac/Consumption/NumberOfPhases': 1})
def test_battery_selection_inverter_with_soc(self):
# An inverter that also publishes /Soc becomes the active battery service
# once the vebus is gone.
self._add_device('com.victronenergy.inverter.ttyO1',
product_name='inverter',
values={
'/Dc/0/Voltage': 12.8,
'/Ac/Out/L1/V': 230,
'/Ac/Out/L1/I': 1.0,
'/Soc': 55})
self._update_values()
# The vebus is still preferred
self._check_values({
'/Dc/Battery/Voltage': 12.25,
'/Dc/Battery/Soc': 53.2,
'/Dc/Battery/VoltageService': 'com.victronenergy.vebus.ttyO1',
'/ActiveBatteryService': 'com.victronenergy.vebus/0'})
# ... but if the vebus is not suitable... use the first vedirect inverter
self._remove_device('com.victronenergy.vebus.ttyO1')
self._update_values()
self._check_values({
'/Dc/Battery/Voltage': 12.8,
'/Dc/Battery/Soc': 55,
'/ActiveBatteryService': 'com.victronenergy.inverter/0',
'/Dc/Battery/VoltageService': 'com.victronenergy.inverter.ttyO1'})
def test_battery_selection_solarcharger_extra_current(self):
# When the vebus exposes /ExtraBatteryCurrent, systemcalc writes the total
# solar-charger current (9.7 + 10) back to it.
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/ExtraBatteryCurrent', 0)
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Load/I': 5,
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._add_device('com.victronenergy.solarcharger.ttyO2',
product_name='solarcharger',
values={
'/Load/I': 10,
'/Dc/0/Voltage': 12,
'/Dc/0/Current': 10})
self._update_values()
self._check_values({
'/Dc/Battery/Soc': 53.2,
'/Dc/Battery/Current': (12.4 * 9.7 + 12 * 10 - 12.25 * 8) / 12.25,
'/Dc/Battery/Power': 12.4 * 9.7 + 12 * 10 - 12.25 * 8,
'/Dc/Battery/Voltage': 12.25,
'/ActiveBatteryService': 'com.victronenergy.vebus/0'})
self.assertEqual(9.7 + 10, self._monitor.get_value('com.victronenergy.vebus.ttyO1', '/ExtraBatteryCurrent'))
def test_battery_selection_no_battery(self):
# 'nobattery' selection: SOC invalid, but V/I/P still derived from vebus.
self._update_values()
self._set_setting('/Settings/SystemSetup/BatteryService', 'nobattery')
self._update_values()
self._check_values({
'/Dc/Battery/Soc': None,
'/Dc/Battery/Current': -8,
'/Dc/Battery/Power': -8 * 12.25,
'/Dc/Battery/Voltage': 12.25,
'/ActiveBatteryService': None})
def test_battery_no_battery2(self):
# Selecting a battery service that does not exist: no active service and
# no battery/system power.
self._update_values()
self._set_setting('/Settings/SystemSetup/BatteryService', 'com.victronenergy.battery/2')
self._set_setting('/Settings/SystemSetup/HasDcSystem', 1)
self._update_values()
self._check_values({
'/Dc/System/Power': None,
'/Dc/Battery/Power': None,
'/ActiveBatteryService': None})
def test_battery_selection_wrong_format(self):
# A malformed BatteryService setting: the service list still exposes the
# three standard choices and no service is auto-selected.
self._set_setting('/Settings/SystemSetup/BatteryService', 'wrong format')
self._update_values()
available_measurements = json.loads(self._service['/AvailableBatteryServices'])
self.assertEqual(len(available_measurements), 3)
self.assertEqual(available_measurements['default'], 'Automatic')
self.assertEqual(available_measurements['nobattery'], 'No battery monitor')
self.assertEqual(available_measurements['com.victronenergy.vebus/0'], 'Multi on dummy')
self._check_values({'/AutoSelectedBatteryService': None})
def test_battery_no_battery_power(self):
# Selected battery monitor exists but publishes only invalid values:
# battery and DC system power stay None while PV power is still computed.
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Dc/0/Voltage': 12,
'/Dc/0/Current': 10})
self._add_device('com.victronenergy.solarcharger.ttyO2',
product_name='solarcharger',
values={
'/Dc/0/Voltage': 12.5,
'/Dc/0/Current': 20})
self._add_device('com.victronenergy.battery.ttyO2',
product_name='battery',
values={
'/Dc/0/Voltage': None,
'/Dc/0/Current': None,
'/Dc/0/Power': None,
'/DeviceInstance': 2})
self._set_setting('/Settings/SystemSetup/BatteryService', 'com.victronenergy.battery/2')
self._set_setting('/Settings/SystemSetup/HasDcSystem', 1)
self._update_values()
self._check_values({
'/Dc/System/Power': None,
'/Dc/Battery/Power': None,
'/Dc/Pv/Power': 12 * 10 + 12.5 * 20,
'/ActiveBatteryService': 'com.victronenergy.battery/2'})
def test_removed_services(self):
# Sometimes a service is removed while systemcalc is doing its calculations. Net result is that
# the D-Bus monitor will return None on items that were part of the service. This happens if the
# service disappears after a list of services is retrieved and before values from services in that
# list are used. All derived values must degrade to None, never crash.
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/ActiveIn/L1/P', None)
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/ActiveIn/L2/P', None)
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/ActiveIn/L3/P', None)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L1/P', None)
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L2/P', None)
self._monitor.add_value('com.victronenergy.vebus.ttyO1', '/Ac/Out/L3/P', None)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Ac/ActiveIn/ActiveInput', None)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Voltage', None)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Dc/0/Current', None)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Soc', None)
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/State', None)
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': None,
'/Position': None
})
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Dc/0/Voltage': None,
'/Dc/0/Current': None})
self._add_device('com.victronenergy.charger.ttyUSB2',
product_name='charger',
values={
'/Dc/0/Voltage': None,
'/Dc/0/Current': None})
self._add_device('com.victronenergy.battery.ttyO2',
product_name='battery',
values={
'/Dc/0/Voltage': None,
'/Dc/0/Current': None,
'/Dc/0/Power': None})
self._set_setting('/Settings/SystemSetup/BatteryService', 'com.victronenergy.vebus/0')
self._set_setting('/Settings/SystemSetup/HasDcSystem', 1)
self._update_values()
self._check_values({
'/Ac/ActiveIn/Source': None,
'/Ac/Grid/L1/Power': None,
'/Ac/Grid/L2/Power': None,
'/Ac/Grid/L3/Power': None,
'/Ac/Genset/NumberOfPhases': None,
'/Ac/Consumption/NumberOfPhases': None,
'/Ac/ConsumptionOnInput/NumberOfPhases': None,
'/Ac/ConsumptionOnOutput/NumberOfPhases': None,
'/Ac/PvOnOutput/NumberOfPhases': None
})
def test_disconnected_vebus_is_ignored_in_auto_mode(self):
# A disconnected vebus must not be auto-selected for SOC, though its
# voltage reading is still usable.
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/Connected', 0)
self._update_values()
self._check_values({
'/Dc/Battery/Soc': None,
'/Dc/Battery/Voltage': 12.25})
def test_connected_vebus_is_auto_selected(self):
# A connected vebus (even in state 0/off) is auto-selected as the
# battery monitor.
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/State', 0)
self._update_values()
self._check_values({
'/Dc/Battery/Soc': 53.2,
'/Dc/Battery/Voltage': 12.25,
'/Dc/Battery/Current': -8,
'/Dc/Battery/Power': -98,
'/AutoSelectedBatteryService': 'Multi on dummy'})
def test_onlybattery_defaultsetting(self):
# A dedicated battery monitor service takes precedence over the vebus
# when auto-selecting.
self._add_device('com.victronenergy.battery.ttyO2',
product_name='battery',
values={
'/Dc/0/Voltage': 12.3,
'/Dc/0/Current': 5.3,
'/Dc/0/Power': 65,
'/Soc': 15.3,
'/DeviceInstance': 2})
self._update_values()
self._check_values({
'/Dc/Battery/Soc': 15.3,
'/Dc/Battery/Voltage': 12.3,
'/Dc/Battery/Current': 5.3,
'/Dc/Battery/Power': 65,
'/AutoSelectedBatteryService': 'battery on dummy'})
def test_batteryandvebus_defaultsetting(self):
# Auto-selection switches from vebus to a battery monitor when one
# appears, and back to the vebus when it is removed again.
self._monitor.set_value('com.victronenergy.vebus.ttyO1', '/State', 0)
self._update_values()
self._check_values({
'/Dc/Battery/Soc': 53.2,
'/AutoSelectedBatteryService': 'Multi on dummy'})
self._add_device('com.victronenergy.battery.ttyO2',
product_name='battery',
values={
'/Dc/0/Voltage': 12.3,
'/Dc/0/Current': 5.3,
'/Dc/0/Power': 65,
'/Soc': 15.3,
'/DeviceInstance': 2})
self._update_values()
self._check_values({
'/Dc/Battery/Soc': 15.3,
'/AutoSelectedBatteryService': 'battery on dummy'})
self._monitor.remove_service('com.victronenergy.battery.ttyO2')
self._update_values()
self._check_values({
'/Dc/Battery/Soc': 53.2,
'/AutoSelectedBatteryService': 'Multi on dummy'})
def test_battery_voltage_vebus(self):
# Default: battery voltage comes from the vebus.
self._update_values()
self._check_values({
'/Dc/Battery/Voltage': 12.25})
def test_battery_voltage_solarcharger(self):
# Vebus voltage is preferred over the solar charger's.
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._update_values()
self._check_values({
'/Dc/Battery/Voltage': 12.25})
def test_battery_voltage_charger(self):
# With the vebus gone, an AC charger's voltage reading is used.
self._monitor.remove_service('com.victronenergy.vebus.ttyO1')
self._add_device('com.victronenergy.charger.ttyO1',
product_name='charger',
values={
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._update_values()
self._check_values({
'/Dc/Battery/Voltage': 12.4})
def test_battery_voltage_sequence(self):
# Voltage source priority: vebus > solar charger > AC charger, verified
# by removing services one at a time.
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._update_values()
self._check_values({
'/Dc/Battery/Voltage': 12.25})
self._monitor.remove_service('com.victronenergy.vebus.ttyO1')
self._update_values()
self._check_values({
'/Dc/Battery/Voltage': 12.4})
self._add_device('com.victronenergy.charger.ttyO1',
product_name='charger',
values={
'/Dc/0/Voltage': 12.7,
'/Dc/0/Current': 6.3})
self._update_values()
self._check_values({
'/Dc/Battery/Voltage': 12.4})
self._monitor.remove_service('com.victronenergy.solarcharger.ttyO1')
self._update_values()
self._check_values({
'/Dc/Battery/Voltage': 12.7})
def test_do_not_autoselect_vebus_soc_when_charger_is_present(self):
# While any charger (solar or AC) is present the vebus SOC is unreliable
# and must not be auto-selected; it returns once all chargers are gone.
self._update_values()
self._check_values({
'/Dc/Battery/Soc': 53.2})
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._update_values()
self._check_values({
'/Dc/Battery/Soc': None})
self._add_device('com.victronenergy.charger.ttyO1',
product_name='charger',
values={
'/Dc/0/Voltage': 12.7,
'/Dc/0/Current': 6.3})
self._update_values()
self._check_values({
'/Dc/Battery/Soc': None})
self._monitor.remove_service('com.victronenergy.charger.ttyO1')
self._update_values()
self._check_values({
'/Dc/Battery/Soc': None})
self._monitor.remove_service('com.victronenergy.solarcharger.ttyO1')
self._update_values()
self._check_values({
'/Dc/Battery/Soc': 53.2})
def test_when_hasdcsystem_is_disabled_system_should_be_invalid(self):
# HasDcSystem=0 -> /Dc/System/Power must be invalid (None).
self._set_setting('/Settings/SystemSetup/HasDcSystem', 0)
self._update_values()
self._check_values({
'/Dc/System/Power': None})
def test_calculation_of_dc_system(self):
# DC system power = all charger output (incl. load currents) minus vebus
# draw minus battery power; without a battery monitor only the charger
# load outputs remain.
self._add_device('com.victronenergy.battery.ttyO2',
product_name='battery',
values={
'/Dc/0/Voltage': 12.3,
'/Dc/0/Current': 5.3,
'/Dc/0/Power': 65,
'/Soc': 15.3,
'/DeviceInstance': 2})
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7,
'/Load/I': 5})
self._add_device('com.victronenergy.solarcharger.ttyO2',
product_name='solarcharger',
values={
'/Dc/0/Voltage': 12.3,
'/Dc/0/Current': 10,
'/Load/I': 11})
self._add_device('com.victronenergy.charger.ttyO1',
product_name='charger',
values={
'/Dc/0/Voltage': 12.7,
'/Dc/0/Current': 6.3})
self._add_device('com.victronenergy.charger.ttyO4',
product_name='charger',
values={
'/Dc/0/Voltage': 12.9,
'/Dc/0/Current': 6})
self._set_setting('/Settings/SystemSetup/HasDcSystem', 1)
self._update_values()
self._check_values({
'/Dc/System/Power': 12.4 * (9.7 + 5) + 12.3 * (10 + 11) + 12.7 * 6.3 + 12.9 * 6 - 12.25 * 8 - 65})
self._monitor.remove_service('com.victronenergy.battery.ttyO2')
self._update_values()
self._check_values({
'/Dc/System/Power': 12.4 * 5 + 12.3 * 11})
def test_dc_system_estimate_with_inverter(self):
# Battery discharging at 230 W with no other devices -> all of it is DC
# system load; adding an inverter drawing 220 VA leaves 10 W of DC load.
self._remove_device('com.victronenergy.vebus.ttyO1')
self._add_device('com.victronenergy.battery.ttyO2',
product_name='battery',
values={
'/Dc/0/Voltage': 12.3,
'/Dc/0/Current': -18.7,
'/Dc/0/Power': -230,
'/Soc': 15.3,
'/DeviceInstance': 2})
self._set_setting('/Settings/SystemSetup/HasDcSystem', 1)
self._update_values()
self._check_values({
'/Dc/System/Power': 230 })
self._add_device('com.victronenergy.inverter.ttyO1',
product_name='inverter',
values={
'/Dc/0/Voltage': 12.8,
'/Ac/Out/L1/V': 220,
'/Ac/Out/L1/I': 1.0 })
self._update_values()
self._check_values({
'/Dc/System/Power': 10 })
def test_battery_state(self):
# /Dc/Battery/State: 1 = charging (power > threshold), 2 = discharging
# (power < -threshold), 0 = idle (small power near zero).
self._check_values({
'/Dc/Battery/State': None})
self._add_device('com.victronenergy.battery.ttyO2',
product_name='battery',
values={
'/Dc/0/Voltage': 12.3,
'/Dc/0/Current': 5.3,
'/Dc/0/Power': 65,
'/Soc': 15.3,
'/DeviceInstance': 2})
self._monitor.set_value('com.victronenergy.battery.ttyO2', '/Dc/0/Power', 40)
self._update_values()
self._check_values({
'/Dc/Battery/State': 1})
self._monitor.set_value('com.victronenergy.battery.ttyO2', '/Dc/0/Power', -40)
self._update_values()
self._check_values({
'/Dc/Battery/State': 2})
self._monitor.set_value('com.victronenergy.battery.ttyO2', '/Dc/0/Power', 1)
self._update_values()
self._check_values({
'/Dc/Battery/State': 0})
def test_derive_battery(self):
# No battery monitor and HasDcSystem=0: battery power is derived as total
# charger output minus vebus draw, at the vebus voltage.
self._add_device('com.victronenergy.solarcharger.ttyO1',
product_name='solarcharger',
values={
'/Load/I': 5,
'/Dc/0/Voltage': 12.4,
'/Dc/0/Current': 9.7})
self._add_device('com.victronenergy.charger.ttyO1',
product_name='charger',
values={
'/Dc/0/Voltage': 12.7,
'/Dc/0/Current': 6.3})
self._set_setting('/Settings/SystemSetup/HasDcSystem', 0)
self._update_values()
self._check_values({
'/Dc/Battery/Power': 12.4 * 9.7 + 12.7 * 6.3 - 12.25 * 8,
'/Dc/Battery/Current': (12.4 * 9.7 + 12.7 * 6.3 - 12.25 * 8) / 12.25,
'/Dc/Battery/Voltage': 12.25})
def test_available_battery_measurement(self):
# /AvailableBatteryMeasurements lists the standard entries plus the vebus
# keyed in underscore format ('com_victronenergy_vebus_0/Dc/0').
self._update_values()
available_measurements = self._service['/AvailableBatteryMeasurements']
self.assertEqual(len(available_measurements), 3)
self.assertEqual(available_measurements['default'], 'Automatic')
self.assertEqual(available_measurements['nobattery'], 'No battery monitor')
self.assertEqual(available_measurements['com_victronenergy_vebus_0/Dc/0'], 'Multi on dummy')
self._check_values({'/AutoSelectedBatteryMeasurement': 'com_victronenergy_vebus_0/Dc/0'})
def test_available_battery_measurement_2(self):
# A battery monitor added before the first update appears in the
# measurement list and becomes the auto-selected measurement.
self._add_device('com.victronenergy.battery.ttyO2',
product_name='battery',
values={
'/Dc/0/Voltage': 12.3,
'/Dc/0/Current': 5.3,
'/Dc/0/Power': 65,
'/Soc': 15.3,
'/DeviceInstance': 2})
self._update_values()
available_measurements = self._service['/AvailableBatteryMeasurements']
self.assertEqual(len(available_measurements), 4)
self.assertEqual(available_measurements['com_victronenergy_battery_2/Dc/0'], 'battery on dummy')
self._check_values({'/AutoSelectedBatteryMeasurement': 'com_victronenergy_battery_2/Dc/0'})
def test_available_battery_measurement_3(self):
# Same as _2 but the battery monitor arrives after an update has already
# run: the measurement list must be refreshed dynamically.
self._update_values()
available_measurements = self._service['/AvailableBatteryMeasurements']
self.assertEqual(len(available_measurements), 3)
self._add_device('com.victronenergy.battery.ttyO2',
product_name='battery',
values={
'/Dc/0/Voltage': 12.3,
'/Dc/0/Current': 5.3,
'/Dc/0/Power': 65,
'/Soc': 15.3,
'/DeviceInstance': 2})
self._update_values()
available_measurements = self._service['/AvailableBatteryMeasurements']
self.assertEqual(len(available_measurements), 4)
self.assertEqual(available_measurements['com_victronenergy_battery_2/Dc/0'], 'battery on dummy')
self._check_values({'/AutoSelectedBatteryMeasurement': 'com_victronenergy_battery_2/Dc/0'})
def test_pv_inverter_ids_empty(self):
# No PV inverters -> /PvInvertersProductIds is an empty list.
self._update_values()
self.assertEqual([], self._service['/PvInvertersProductIds'])
def test_pv_inverter_ids(self):
# Distinct PV-inverter product IDs are collected into the list.
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Position': 0,
'/ProductId': 0xB0FE
})
self._add_device('com.victronenergy.pvinverter.fronius_122_2311', {
'/Ac/L1/Power': 500,
'/Position': 0,
'/ProductId': 0xB0FF
})
self._update_values()
self.assertEqual([0xB0FE, 0xB0FF], self._service['/PvInvertersProductIds'])
def test_pv_inverter_ids_identical(self):
# Duplicate product IDs are de-duplicated in /PvInvertersProductIds.
self._add_device('com.victronenergy.pvinverter.fronius_122_2312', {
'/Ac/L1/Power': 500,
'/Position': 0,
'/ProductId': 0xB0FE
})
self._add_device('com.victronenergy.pvinverter.fronius_122_2311', {
'/Ac/L1/Power': 500,
'/Position': 0,
'/ProductId': 0xB0FE
})
self._update_values()
self.assertEqual([0xB0FE], self._service['/PvInvertersProductIds'])
# Run the full test suite when executed directly.
if __name__ == '__main__':
unittest.main()
| 33.260469
| 110
| 0.66968
| 5,279
| 39,713
| 4.842584
| 0.058723
| 0.102644
| 0.055077
| 0.062588
| 0.85327
| 0.820138
| 0.798427
| 0.76819
| 0.744211
| 0.707558
| 0
| 0.060304
| 0.148591
| 39,713
| 1,193
| 111
| 33.288349
| 0.695759
| 0.020497
| 0
| 0.719547
| 0
| 0
| 0.424299
| 0.259779
| 0
| 0
| 0.00126
| 0
| 0.017941
| 1
| 0.063267
| false
| 0
| 0.004721
| 0
| 0.068933
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81fdcdfaff7e580618d068af88d6157b92336881
| 12,411
|
py
|
Python
|
onnx/backend/test/case/node/convtranspose.py
|
rajeevsrao/onnx
|
355a4954ea4e5836a5e943589509951c44feb6b4
|
[
"MIT"
] | 137
|
2020-04-28T12:28:32.000Z
|
2022-03-18T10:48:25.000Z
|
onnx/backend/test/case/node/convtranspose.py
|
rajeevsrao/onnx
|
355a4954ea4e5836a5e943589509951c44feb6b4
|
[
"MIT"
] | 24
|
2020-05-06T08:06:42.000Z
|
2021-12-31T07:46:13.000Z
|
onnx/backend/test/case/node/convtranspose.py
|
rajeevsrao/onnx
|
355a4954ea4e5836a5e943589509951c44feb6b4
|
[
"MIT"
] | 24
|
2020-05-06T11:43:22.000Z
|
2022-03-18T10:50:35.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np # type: ignore
import onnx
from ..base import Base
from . import expect
class ConvTranspose(Base):
    """ONNX backend test cases for the ConvTranspose operator.

    Each ``export*`` method builds a ConvTranspose node with concrete
    inputs and a precomputed expected output, then registers the case
    via ``expect`` so backends can be checked against it.
    """

    @staticmethod
    def export():  # type: () -> None
        # Basic 2-D transposed convolution with default attributes:
        # (1, 1, 3, 3) input, all-ones (1, 2, 3, 3) kernel -> (1, 2, 5, 5).
        x = np.array([[[[0., 1., 2.],  # (1, 1, 3, 3)
                        [3., 4., 5.],
                        [6., 7., 8.]]]]).astype(np.float32)

        W = np.array([[[[1., 1., 1.],  # (1, 2, 3, 3)
                        [1., 1., 1.],
                        [1., 1., 1.]],
                       [[1., 1., 1.],
                        [1., 1., 1.],
                        [1., 1., 1.]]]]).astype(np.float32)

        node = onnx.helper.make_node("ConvTranspose", ["X", "W"], ["Y"])

        y = np.array([[[[0., 1., 3., 3., 2.],  # (1, 2, 5, 5)
                        [3., 8., 15., 12., 7.],
                        [9., 21., 36., 27., 15.],
                        [9., 20., 33., 24., 13.],
                        [6., 13., 21., 15., 8.]],

                       [[0., 1., 3., 3., 2.],
                        [3., 8., 15., 12., 7.],
                        [9., 21., 36., 27., 15.],
                        [9., 20., 33., 24., 13.],
                        [6., 13., 21., 15., 8.]]]]).astype(np.float32)

        expect(node, inputs=[x, W], outputs=[y], name='test_convtranspose')

    @staticmethod
    def export_convtranspose_1d():  # type: () -> None
        # 1-D transposed convolution: (1, 1, 3) input -> (1, 2, 5) output.
        x = np.array([[[0., 1., 2.]]]).astype(np.float32)  # (1, 1, 3)

        W = np.array([[[1., 1., 1.],  # (1, 2, 3)
                       [1., 1., 1.]]]).astype(np.float32)

        node = onnx.helper.make_node("ConvTranspose", ["X", "W"], ["Y"])

        y = np.array([[[0., 1., 3., 3., 2.],  # (1, 2, 5)
                       [0., 1., 3., 3., 2.]]]).astype(np.float32)

        expect(node, inputs=[x, W], outputs=[y], name='test_convtranspose_1d')

    @staticmethod
    def export_convtranspose_3d():  # type: () -> None
        # 3-D transposed convolution: (1, 1, 3, 4, 5) input, all-ones
        # (1, 2, 3, 3, 3) kernel -> (1, 2, 5, 6, 7) output.
        x = np.array([[[[[0., 1., 2., 3., 4.],  # (1, 1, 3, 4, 5)
                         [5., 6., 7., 8., 9.],
                         [10., 11., 12., 13., 14.],
                         [15., 16., 17., 18., 19.]],
                        [[20., 21., 22., 23., 24.],
                         [25., 26., 27., 28., 29.],
                         [30., 31., 32., 33., 34.],
                         [35., 36., 37., 38., 39.]],
                        [[40., 41., 42., 43., 44.],
                         [45., 46., 47., 48., 49.],
                         [50., 51., 52., 53., 54.],
                         [55., 56., 57., 58., 59.]]]]]).astype(np.float32)

        W = np.array([[[[[1., 1., 1.],  # (1, 2, 3, 3, 3)
                         [1., 1., 1.],
                         [1., 1., 1.]],
                        [[1., 1., 1.],
                         [1., 1., 1.],
                         [1., 1., 1.]],
                        [[1., 1., 1.],
                         [1., 1., 1.],
                         [1., 1., 1.]]],
                       [[[1., 1., 1.],
                         [1., 1., 1.],
                         [1., 1., 1.]],
                        [[1., 1., 1.],
                         [1., 1., 1.],
                         [1., 1., 1.]],
                        [[1., 1., 1.],
                         [1., 1., 1.],
                         [1., 1., 1.]]]]]).astype(np.float32)

        node = onnx.helper.make_node("ConvTranspose", ["X", "W"], ["Y"])

        # Both output channels are identical because the kernel is all ones.
        y = np.array([[[[[0., 1., 3., 6., 9., 7., 4.],  # (1, 2, 5, 6, 7)
                         [5., 12., 21., 27., 33., 24., 13.],
                         [15., 33., 54., 63., 72., 51., 27.],
                         [30., 63., 99., 108., 117., 81., 42.],
                         [25., 52., 81., 87., 93., 64., 33.],
                         [15., 31., 48., 51., 54., 37., 19.]],
                        [[20., 42., 66., 72., 78., 54., 28.],
                         [50., 104., 162., 174., 186., 128., 66.],
                         [90., 186., 288., 306., 324., 222., 114.],
                         [120., 246., 378., 396., 414., 282., 144.],
                         [90., 184., 282., 294., 306., 208., 106.],
                         [50., 102., 156., 162., 168., 114., 58.]],
                        [[60., 123., 189., 198., 207., 141., 72.],
                         [135., 276., 423., 441., 459., 312., 159.],
                         [225., 459., 702., 729., 756., 513., 261.],
                         [270., 549., 837., 864., 891., 603., 306.],
                         [195., 396., 603., 621., 639., 432., 219.],
                         [105., 213., 324., 333., 342., 231., 117.]],
                        [[60., 122., 186., 192., 198., 134., 68.],
                         [130., 264., 402., 414., 426., 288., 146.],
                         [210., 426., 648., 666., 684., 462., 234.],
                         [240., 486., 738., 756., 774., 522., 264.],
                         [170., 344., 522., 534., 546., 368., 186.],
                         [90., 182., 276., 282., 288., 194., 98.]],
                        [[40., 81., 123., 126., 129., 87., 44.],
                         [85., 172., 261., 267., 273., 184., 93.],
                         [135., 273., 414., 423., 432., 291., 147.],
                         [150., 303., 459., 468., 477., 321., 162.],
                         [105., 212., 321., 327., 333., 224., 113.],
                         [55., 111., 168., 171., 174., 117., 59.]]],
                       [[[0., 1., 3., 6., 9., 7., 4.],
                         [5., 12., 21., 27., 33., 24., 13.],
                         [15., 33., 54., 63., 72., 51., 27.],
                         [30., 63., 99., 108., 117., 81., 42.],
                         [25., 52., 81., 87., 93., 64., 33.],
                         [15., 31., 48., 51., 54., 37., 19.]],
                        [[20., 42., 66., 72., 78., 54., 28.],
                         [50., 104., 162., 174., 186., 128., 66.],
                         [90., 186., 288., 306., 324., 222., 114.],
                         [120., 246., 378., 396., 414., 282., 144.],
                         [90., 184., 282., 294., 306., 208., 106.],
                         [50., 102., 156., 162., 168., 114., 58.]],
                        [[60., 123., 189., 198., 207., 141., 72.],
                         [135., 276., 423., 441., 459., 312., 159.],
                         [225., 459., 702., 729., 756., 513., 261.],
                         [270., 549., 837., 864., 891., 603., 306.],
                         [195., 396., 603., 621., 639., 432., 219.],
                         [105., 213., 324., 333., 342., 231., 117.]],
                        [[60., 122., 186., 192., 198., 134., 68.],
                         [130., 264., 402., 414., 426., 288., 146.],
                         [210., 426., 648., 666., 684., 462., 234.],
                         [240., 486., 738., 756., 774., 522., 264.],
                         [170., 344., 522., 534., 546., 368., 186.],
                         [90., 182., 276., 282., 288., 194., 98.]],
                        [[40., 81., 123., 126., 129., 87., 44.],
                         [85., 172., 261., 267., 273., 184., 93.],
                         [135., 273., 414., 423., 432., 291., 147.],
                         [150., 303., 459., 468., 477., 321., 162.],
                         [105., 212., 321., 327., 333., 224., 113.],
                         [55., 111., 168., 171., 174., 117., 59.]]]]]).astype(np.float32)

        expect(node, inputs=[x, W], outputs=[y], name='test_convtranspose_3d')

    @staticmethod
    def export_convtranspose_attributes():  # type: () -> None
        # Same expected output produced three ways: via output_shape, via
        # output_padding, and via output_shape + kernel_shape + output_padding.
        x = np.array([[[[0., 1., 2.],  # (1, 1, 3, 3)
                        [3., 4., 5.],
                        [6., 7., 8.]]]]).astype(np.float32)

        W = np.array([[[[1., 1., 1.],  # (1, 2, 3, 3)
                        [1., 1., 1.],
                        [1., 1., 1.]],
                       [[1., 1., 1.],
                        [1., 1., 1.],
                        [1., 1., 1.]]]]).astype(np.float32)

        y = np.array([[[[0., 0., 1., 1., 3., 2., 2., 0.],  # (1, 2, 10, 8)
                        [0., 0., 1., 1., 3., 2., 2., 0.],
                        [0., 0., 1., 1., 3., 2., 2., 0.],
                        [3., 3., 7., 4., 9., 5., 5., 0.],
                        [3., 3., 7., 4., 9., 5., 5., 0.],
                        [3., 3., 7., 4., 9., 5., 5., 0.],
                        [6., 6., 13., 7., 15., 8., 8., 0.],
                        [6., 6., 13., 7., 15., 8., 8., 0.],
                        [6., 6., 13., 7., 15., 8., 8., 0.],
                        [0., 0., 0., 0., 0., 0., 0., 0.]],

                       [[0., 0., 1., 1., 3., 2., 2., 0.],
                        [0., 0., 1., 1., 3., 2., 2., 0.],
                        [0., 0., 1., 1., 3., 2., 2., 0.],
                        [3., 3., 7., 4., 9., 5., 5., 0.],
                        [3., 3., 7., 4., 9., 5., 5., 0.],
                        [3., 3., 7., 4., 9., 5., 5., 0.],
                        [6., 6., 13., 7., 15., 8., 8., 0.],
                        [6., 6., 13., 7., 15., 8., 8., 0.],
                        [6., 6., 13., 7., 15., 8., 8., 0.],
                        [0., 0., 0., 0., 0., 0., 0., 0.]]]]).astype(np.float32)

        node = onnx.helper.make_node("ConvTranspose", ["X", "W"], ["Y"],
                                     strides=[3, 2],
                                     output_shape=[10, 8])
        expect(node, inputs=[x, W], outputs=[y], name='test_convtranspose_output_shape')

        node = onnx.helper.make_node("ConvTranspose", ["X", "W"], ["Y"],
                                     strides=[3, 2],
                                     output_padding=[1, 1])
        expect(node, inputs=[x, W], outputs=[y], name='test_convtranspose_pad')

        node = onnx.helper.make_node(
            'ConvTranspose', ['X', 'W'], ['Y'],
            name='test',
            strides=[3, 2],
            output_shape=[10, 8],
            kernel_shape=[3, 3],
            output_padding=[1, 1]
        )
        expect(node, inputs=[x, W], outputs=[y],
               name='test_convtranspose_kernel_shape')

    @staticmethod
    def export_convtranspose_pads():  # type: () -> None
        # Explicit pads trim the strided output: pads=[1, 2, 1, 2] with
        # strides=[3, 2] yields a (1, 2, 7, 3) result.
        x = np.array([[[[0., 1., 2.],  # (1, 1, 3, 3)
                        [3., 4., 5.],
                        [6., 7., 8.]]]]).astype(np.float32)

        W = np.array([[[[1., 1., 1.],  # (1, 2, 3, 3)
                        [1., 1., 1.],
                        [1., 1., 1.]],
                       [[1., 1., 1.],
                        [1., 1., 1.],
                        [1., 1., 1.]]]]).astype(np.float32)

        node = onnx.helper.make_node("ConvTranspose", ["X", "W"], ["Y"],
                                     strides=[3, 2],
                                     pads=[1, 2, 1, 2])

        y = np.array([[[[1., 1., 3.],  # (1, 2, 7, 3)
                        [1., 1., 3.],
                        [7., 4., 9.],
                        [7., 4., 9.],
                        [7., 4., 9.],
                        [13., 7., 15.],
                        [13., 7., 15.]],

                       [[1., 1., 3.],
                        [1., 1., 3.],
                        [7., 4., 9.],
                        [7., 4., 9.],
                        [7., 4., 9.],
                        [13., 7., 15.],
                        [13., 7., 15.]]]]).astype(np.float32)

        expect(node, inputs=[x, W], outputs=[y], name='test_convtranspose_pads')

    @staticmethod
    def export_convtranspose_dilations():  # type: () -> None
        # Dilated transposed convolution: dilations=[2, 2] spreads the
        # 2x2 kernel taps two pixels apart.
        x = np.array([[[[3., 8., 1.],  # (1, 1, 3, 3)
                        [9., 5., 7.],
                        [3., 2., 6.]]]]).astype(np.float32)
        W = np.array([[[[7., 2.],  # (1, 1, 2, 2)
                        [1., 9.]]]]).astype(np.float32)

        node = onnx.helper.make_node("ConvTranspose", ["X", "W"], ["Y"], dilations=[2, 2])

        y = np.array([[[[21., 56., 13., 16., 2.],  # [1, 1, 5, 5]
                        [63., 35., 67., 10., 14.],
                        [24., 22., 76., 76., 21.],
                        [9., 5., 88., 45., 63.],
                        [3., 2., 33., 18., 54.]]]]).astype(np.float32)

        expect(node, inputs=[x, W], outputs=[y], name='test_convtranspose_dilations')
| 45.29562
| 90
| 0.290307
| 1,384
| 12,411
| 2.560694
| 0.16474
| 0.073363
| 0.08465
| 0.100451
| 0.788939
| 0.781603
| 0.775113
| 0.763262
| 0.750282
| 0.735609
| 0
| 0.283096
| 0.468053
| 12,411
| 273
| 91
| 45.461538
| 0.253711
| 0.02812
| 0
| 0.638767
| 0
| 0
| 0.027168
| 0.014706
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026432
| false
| 0
| 0.035242
| 0
| 0.066079
| 0.004405
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3170fdaaf6d72cf6d3706cc25e693341d387a08
| 17,654
|
py
|
Python
|
tests/acceptance/test_branches.py
|
barryib/gitlabform
|
c07b434bd50706ad69e69b37a7eb39230a4414f5
|
[
"MIT"
] | null | null | null |
tests/acceptance/test_branches.py
|
barryib/gitlabform
|
c07b434bd50706ad69e69b37a7eb39230a4414f5
|
[
"MIT"
] | null | null | null |
tests/acceptance/test_branches.py
|
barryib/gitlabform
|
c07b434bd50706ad69e69b37a7eb39230a4414f5
|
[
"MIT"
] | null | null | null |
import pytest
from gitlabform.gitlab import AccessLevel
from tests.acceptance import (
run_gitlabform,
DEFAULT_README,
)
@pytest.fixture(scope="function")
def branches(request, gitlab, group, project):
    """Create the branches used by the protection tests off ``main`` and
    register a finalizer that deletes them and restores the default README.
    """
    branches = [
        "protect_branch_but_allow_all",
        "protect_branch_with_code_owner_approval_required",
        "protect_branch_and_disallow_all",
        "protect_branch_and_allow_merges",
        "protect_branch_and_allow_pushes",
        "protect_branch_and_allow_merges_access_levels",
        "protect_branch_and_allow_pushes_access_levels",
        "protect_branch",
    ]
    for branch in branches:
        gitlab.create_branch(f"{group}/{project}", branch, "main")

    def fin():
        # Teardown: remove the test branches and reset README.md on main
        # so the project is back in its pristine state for the next test.
        for branch in branches:
            gitlab.delete_branch(f"{group}/{project}", branch)
        gitlab.set_file(
            f"{group}/{project}",
            "main",
            "README.md",
            DEFAULT_README,
            "Reset default content",
        )

    request.addfinalizer(fin)
# protect_branch_with_code_owner_approval_required = f"""
# projects_and_groups:
# {group_and_project_name}:
# branches:
# protect_branch_with_code_owner_approval_required:
# protected: true
# developers_can_push: false
# developers_can_merge: true
# code_owner_approval_required: true
# """
class TestBranches:
    """Acceptance tests for branch protection via gitlabform.

    Covers the old API (developers_can_push/merge), the new API
    (*_access_level keys), switching between the two, and unprotecting.

    NOTE(review): the YAML indentation inside the f-string configs was
    reconstructed (the extraction flattened all leading whitespace) —
    verify against VCS history.
    """

    def test__protect_branch_but_allow_all(self, gitlab, group, project, branches):
        # Old-API protection with both developer permissions enabled.
        group_and_project_name = f"{group}/{project}"

        protect_branch_but_allow_all = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch_but_allow_all:
                protected: true
                developers_can_push: true
                developers_can_merge: true
        """

        run_gitlabform(protect_branch_but_allow_all, group_and_project_name)

        branch = gitlab.get_branch(
            group_and_project_name, "protect_branch_but_allow_all"
        )
        assert branch["protected"] is True
        assert branch["developers_can_push"] is True
        assert branch["developers_can_merge"] is True

        # branch_access_levels = gitlab.get_branch_access_levels(
        #     group_and_project_name, "protect_branch_but_allow_all"
        # )
        # assert branch_access_levels["code_owner_approval_required"] is False

    #
    # this test will pass only on GitLab Premium (paid)
    # def test__protect_branch_with_code_owner_approval_required(self, gitlab):
    #     gf = GitLabForm(
    #         config_string=protect_branch_with_code_owner_approval_required,
    #         project_or_group=group_and_project_name,
    #     )
    #     gf.main()
    #
    #     branch_access_levels = gitlab.get_branch_access_levels(
    #         group_and_project_name, "protect_branch_with_code_owner_approval_required"
    #     )
    #     assert branch_access_levels["code_owner_approval_required"] is True

    def test__protect_branch_and_disallow_all(self, gitlab, group, project, branches):
        # Old-API protection with both developer permissions disabled.
        group_and_project_name = f"{group}/{project}"

        protect_branch_and_disallow_all = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch_and_disallow_all:
                protected: true
                developers_can_push: false
                developers_can_merge: false
        """

        run_gitlabform(protect_branch_and_disallow_all, group_and_project_name)

        branch = gitlab.get_branch(
            group_and_project_name, "protect_branch_and_disallow_all"
        )
        assert branch["protected"] is True
        assert branch["developers_can_push"] is False
        assert branch["developers_can_merge"] is False

    def test__mixed_config(self, gitlab, group, project, branches):
        # Two branches protected with opposite push/merge settings in one
        # config, then both unprotected in a second run.
        group_and_project_name = f"{group}/{project}"

        mixed_config = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch_and_allow_merges:
                protected: true
                developers_can_push: false
                developers_can_merge: true
              protect_branch_and_allow_pushes:
                protected: true
                developers_can_push: true
                developers_can_merge: false
        """

        run_gitlabform(mixed_config, group_and_project_name)

        branch = gitlab.get_branch(
            group_and_project_name, "protect_branch_and_allow_merges"
        )
        assert branch["protected"] is True
        assert branch["developers_can_push"] is False
        assert branch["developers_can_merge"] is True

        branch = gitlab.get_branch(
            group_and_project_name, "protect_branch_and_allow_pushes"
        )
        assert branch["protected"] is True
        assert branch["developers_can_push"] is True
        assert branch["developers_can_merge"] is False

        unprotect_branches = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch_and_allow_merges:
                protected: false
              protect_branch_and_allow_pushes:
                protected: false
        """

        run_gitlabform(unprotect_branches, group_and_project_name)

        for branch in [
            "protect_branch_and_allow_merges",
            "protect_branch_and_allow_pushes",
        ]:
            branch = gitlab.get_branch(group_and_project_name, branch)
            assert branch["protected"] is False

    def test__mixed_config_with_new_api(self, gitlab, group, project, branches):
        # New-API protection (access levels), including a wildcard branch
        # name; then an update run, then unprotecting both.
        group_and_project_name = f"{group}/{project}"

        mixed_config_with_access_levels = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch_and_allow_merges_access_levels:
                protected: true
                push_access_level: {AccessLevel.NO_ACCESS.value}
                merge_access_level: {AccessLevel.DEVELOPER.value}
                unprotect_access_level: {AccessLevel.MAINTAINER.value}
              '*_allow_pushes_access_levels':
                protected: true
                push_access_level: {AccessLevel.DEVELOPER.value}
                merge_access_level: {AccessLevel.DEVELOPER.value}
                unprotect_access_level: {AccessLevel.MAINTAINER.value}
        """

        run_gitlabform(mixed_config_with_access_levels, group_and_project_name)

        (
            push_access_level,
            merge_access_level,
            unprotect_access_level,
        ) = gitlab.get_only_branch_access_levels(
            group_and_project_name, "protect_branch_and_allow_merges_access_levels"
        )
        assert push_access_level is AccessLevel.NO_ACCESS.value
        assert merge_access_level is AccessLevel.DEVELOPER.value
        assert unprotect_access_level is AccessLevel.MAINTAINER.value

        (
            push_access_level,
            merge_access_level,
            unprotect_access_level,
        ) = gitlab.get_only_branch_access_levels(
            group_and_project_name, "*_allow_pushes_access_levels"
        )
        assert push_access_level is AccessLevel.DEVELOPER.value
        assert merge_access_level is AccessLevel.DEVELOPER.value
        assert unprotect_access_level is AccessLevel.MAINTAINER.value

        mixed_config_with_access_levels_update = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch_and_allow_merges_access_levels:
                protected: true
                push_access_level: {AccessLevel.NO_ACCESS.value}
                merge_access_level: {AccessLevel.MAINTAINER.value}
                unprotect_access_level: {AccessLevel.MAINTAINER.value}
              '*_allow_pushes_access_levels':
                protected: true
                push_access_level: {AccessLevel.MAINTAINER.value}
                merge_access_level: {AccessLevel.MAINTAINER.value}
                unprotect_access_level: {AccessLevel.MAINTAINER.value}
        """

        run_gitlabform(mixed_config_with_access_levels_update, group_and_project_name)

        (
            push_access_level,
            merge_access_level,
            unprotect_access_level,
        ) = gitlab.get_only_branch_access_levels(
            group_and_project_name, "protect_branch_and_allow_merges_access_levels"
        )
        assert push_access_level is AccessLevel.NO_ACCESS.value
        assert merge_access_level is AccessLevel.MAINTAINER.value
        assert unprotect_access_level is AccessLevel.MAINTAINER.value

        (
            push_access_level,
            merge_access_level,
            unprotect_access_level,
        ) = gitlab.get_only_branch_access_levels(
            group_and_project_name, "*_allow_pushes_access_levels"
        )
        assert push_access_level is AccessLevel.MAINTAINER.value
        assert merge_access_level is AccessLevel.MAINTAINER.value
        assert unprotect_access_level is AccessLevel.MAINTAINER.value

        mixed_config_with_access_levels_unprotect_branches = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch_and_allow_merges_access_levels:
                protected: false
              '*_allow_pushes_access_levels':
                protected: false
        """

        run_gitlabform(
            mixed_config_with_access_levels_unprotect_branches, group_and_project_name
        )

        for branch in [
            "protect_branch_and_allow_merges_access_levels",
            "protect_branch_and_allow_pushes_access_levels",
        ]:
            branch = gitlab.get_branch(group_and_project_name, branch)
            assert branch["protected"] is False

    def test_protect_branch_with_old_api_next_update_with_new_api_and_unprotect(
        self, gitlab, group, project, branches
    ):
        # Protect with the old API, re-protect with the new API, unprotect.
        group_and_project_name = f"{group}/{project}"

        config_protect_branch_with_old_api = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch:
                protected: true
                developers_can_push: true
                developers_can_merge: true
        """

        run_gitlabform(config_protect_branch_with_old_api, group_and_project_name)

        branch = gitlab.get_branch(group_and_project_name, "protect_branch")
        assert branch["protected"] is True
        assert branch["developers_can_push"] is True
        assert branch["developers_can_merge"] is True

        config_protect_branch_with_new_api = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch:
                protected: true
                push_access_level: {AccessLevel.NO_ACCESS.value}
                merge_access_level: {AccessLevel.MAINTAINER.value}
                unprotect_access_level: {AccessLevel.MAINTAINER.value}
        """

        run_gitlabform(config_protect_branch_with_new_api, group_and_project_name)

        (
            push_access_level,
            merge_access_level,
            unprotect_access_level,
        ) = gitlab.get_only_branch_access_levels(
            group_and_project_name, "protect_branch"
        )
        assert push_access_level is AccessLevel.NO_ACCESS.value
        assert merge_access_level is AccessLevel.MAINTAINER.value
        assert unprotect_access_level is AccessLevel.MAINTAINER.value

        config_protect_branch_unprotect = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch:
                protected: false
        """

        run_gitlabform(config_protect_branch_unprotect, group_and_project_name)

        branch = gitlab.get_branch(group_and_project_name, "protect_branch")
        assert branch["protected"] is False

    def test_protect_branch_with_new_api_next_update_with_old_api_and_unprotect(
        self, gitlab, group, project, branches
    ):
        # Mirror of the previous test: new API first, then old API, then
        # unprotect.
        group_and_project_name = f"{group}/{project}"

        config_protect_branch_with_new_api = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch:
                protected: true
                push_access_level: {AccessLevel.NO_ACCESS.value}
                merge_access_level: {AccessLevel.MAINTAINER.value}
                unprotect_access_level: {AccessLevel.MAINTAINER.value}
        """

        run_gitlabform(config_protect_branch_with_new_api, group_and_project_name)

        (
            push_access_level,
            merge_access_level,
            unprotect_access_level,
        ) = gitlab.get_only_branch_access_levels(
            group_and_project_name, "protect_branch"
        )
        assert push_access_level is AccessLevel.NO_ACCESS.value
        assert merge_access_level is AccessLevel.MAINTAINER.value
        assert unprotect_access_level is AccessLevel.MAINTAINER.value

        config_protect_branch_with_old_api = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch:
                protected: true
                developers_can_push: true
                developers_can_merge: true
        """

        run_gitlabform(config_protect_branch_with_old_api, group_and_project_name)

        branch = gitlab.get_branch(group_and_project_name, "protect_branch")
        assert branch["protected"] is True
        assert branch["developers_can_push"] is True
        assert branch["developers_can_merge"] is True

        config_protect_branch_unprotect = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch:
                protected: false
        """

        run_gitlabform(config_protect_branch_unprotect, group_and_project_name)

        branch = gitlab.get_branch(group_and_project_name, "protect_branch")
        assert branch["protected"] is False

    def test_unprotect_when_the_rest_of_the_parameters_are_still_specified_old_api(
        self, gitlab, group, project, branches
    ):
        # Unprotecting must win even when the old-API permission keys are
        # still present in the config.
        group_and_project_name = f"{group}/{project}"

        config_protect_branch_with_old_api = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch:
                protected: true
                developers_can_push: true
                developers_can_merge: true
        """

        run_gitlabform(config_protect_branch_with_old_api, group_and_project_name)

        branch = gitlab.get_branch(group_and_project_name, "protect_branch")
        assert branch["protected"] is True
        assert branch["developers_can_push"] is True
        assert branch["developers_can_merge"] is True

        config_unprotect_branch_with_old_api = f"""
        gitlab:
          api_version: 4

        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch:
                protected: false
                developers_can_push: true
                developers_can_merge: true
        """

        run_gitlabform(config_unprotect_branch_with_old_api, group_and_project_name)

        branch = gitlab.get_branch(group_and_project_name, "protect_branch")
        assert branch["protected"] is False

    def test_unprotect_when_the_rest_of_the_parameters_are_still_specified_new_api(
        self, gitlab, group, project, branches
    ):
        # Same as above for the new API: access-level keys alongside
        # protected: false must still result in an unprotected branch.
        group_and_project_name = f"{group}/{project}"

        config_protect_branch_with_new_api = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch:
                protected: true
                push_access_level: {AccessLevel.NO_ACCESS.value}
                merge_access_level: {AccessLevel.MAINTAINER.value}
                unprotect_access_level: {AccessLevel.MAINTAINER.value}
        """

        run_gitlabform(config_protect_branch_with_new_api, group_and_project_name)

        (
            push_access_level,
            merge_access_level,
            unprotect_access_level,
        ) = gitlab.get_only_branch_access_levels(
            group_and_project_name, "protect_branch"
        )
        assert push_access_level is AccessLevel.NO_ACCESS.value
        assert merge_access_level is AccessLevel.MAINTAINER.value
        assert unprotect_access_level is AccessLevel.MAINTAINER.value

        config_unprotect_branch_with_new_api = f"""
        projects_and_groups:
          {group_and_project_name}:
            branches:
              protect_branch:
                protected: false
                push_access_level: {AccessLevel.NO_ACCESS.value}
                merge_access_level: {AccessLevel.MAINTAINER.value}
                unprotect_access_level: {AccessLevel.MAINTAINER.value}
        """

        run_gitlabform(config_unprotect_branch_with_new_api, group_and_project_name)

        # old API
        branch = gitlab.get_branch(group_and_project_name, "protect_branch")
        assert branch["protected"] is False

        # new API
        (
            push_access_level,
            merge_access_level,
            unprotect_access_level,
        ) = gitlab.get_only_branch_access_levels(
            group_and_project_name, "protect_branch"
        )
        assert push_access_level is None
        assert merge_access_level is None
        assert unprotect_access_level is None
| 36.85595
| 92
| 0.650164
| 1,910
| 17,654
| 5.504712
| 0.051832
| 0.096443
| 0.095587
| 0.121077
| 0.945596
| 0.905555
| 0.882918
| 0.85486
| 0.851151
| 0.832033
| 0
| 0.000079
| 0.287244
| 17,654
| 478
| 93
| 36.933054
| 0.835492
| 0.058004
| 0
| 0.772251
| 0
| 0
| 0.425904
| 0.155783
| 0
| 0
| 0
| 0
| 0.133508
| 1
| 0.026178
| false
| 0
| 0.007853
| 0
| 0.036649
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c31ece530403e309d1ebd07482de7db1ada2ac84
| 9,733
|
py
|
Python
|
tinkoff/invest/__init__.py
|
Tinkoff/invest-python
|
26ef8cc1e70d144707246e64f29466b4491d4f8c
|
[
"Apache-2.0"
] | 41
|
2022-01-21T05:38:57.000Z
|
2022-03-30T03:54:41.000Z
|
tinkoff/invest/__init__.py
|
Tinkoff/invest-python
|
26ef8cc1e70d144707246e64f29466b4491d4f8c
|
[
"Apache-2.0"
] | 20
|
2022-01-24T05:46:02.000Z
|
2022-03-31T16:54:04.000Z
|
tinkoff/invest/__init__.py
|
Tinkoff/invest-python
|
26ef8cc1e70d144707246e64f29466b4491d4f8c
|
[
"Apache-2.0"
] | 15
|
2022-01-25T06:53:27.000Z
|
2022-03-30T03:49:07.000Z
|
from .clients import AsyncClient, Client
from .exceptions import AioRequestError, InvestError, RequestError
from .logging import get_current_tracking_id
from .schemas import (
AccessLevel,
Account,
AccountStatus,
AccountType,
AccruedInterest,
Asset,
AssetBond,
AssetClearingCertificate,
AssetCurrency,
AssetEtf,
AssetFull,
AssetInstrument,
AssetRequest,
AssetResponse,
AssetSecurity,
AssetShare,
AssetsRequest,
AssetsResponse,
AssetStructuredProduct,
AssetType,
Bond,
BondResponse,
BondsResponse,
Brand,
BrokerReportRequest,
BrokerReportResponse,
CancelOrderRequest,
CancelOrderResponse,
CancelStopOrderRequest,
CancelStopOrderResponse,
Candle,
CandleInstrument,
CandleInterval,
CandleSubscription,
CloseSandboxAccountRequest,
CloseSandboxAccountResponse,
CountryResponse,
Coupon,
CouponType,
CurrenciesResponse,
Currency,
CurrencyResponse,
Dividend,
DividendsForeignIssuerReport,
EditFavoritesActionType,
EditFavoritesRequest,
EditFavoritesRequestInstrument,
EditFavoritesResponse,
Etf,
EtfResponse,
EtfsResponse,
FavoriteInstrument,
FindInstrumentRequest,
FindInstrumentResponse,
Future,
FutureResponse,
FuturesResponse,
GenerateBrokerReportRequest,
GenerateDividendsForeignIssuerReportRequest,
GenerateDividendsForeignIssuerReportResponse,
GetAccountsRequest,
GetAccountsResponse,
GetAccruedInterestsRequest,
GetAccruedInterestsResponse,
GetBondCouponsRequest,
GetBondCouponsResponse,
GetBrandRequest,
GetBrandsRequest,
GetBrandsResponse,
GetBrokerReportRequest,
GetCandlesRequest,
GetCandlesResponse,
GetCountriesRequest,
GetCountriesResponse,
GetDividendsForeignIssuerReportRequest,
GetDividendsForeignIssuerReportResponse,
GetDividendsForeignIssuerRequest,
GetDividendsForeignIssuerResponse,
GetDividendsRequest,
GetDividendsResponse,
GetFavoritesRequest,
GetFavoritesResponse,
GetFuturesMarginRequest,
GetFuturesMarginResponse,
GetInfoRequest,
GetInfoResponse,
GetLastPricesRequest,
GetLastPricesResponse,
GetLastTradesRequest,
GetLastTradesResponse,
GetMarginAttributesRequest,
GetMarginAttributesResponse,
GetOrderBookRequest,
GetOrderBookResponse,
GetOrdersRequest,
GetOrdersResponse,
GetOrderStateRequest,
GetStopOrdersRequest,
GetStopOrdersResponse,
GetTradingStatusRequest,
GetTradingStatusResponse,
GetUserTariffRequest,
GetUserTariffResponse,
HistoricCandle,
InfoInstrument,
InfoSubscription,
Instrument,
InstrumentIdType,
InstrumentLink,
InstrumentRequest,
InstrumentResponse,
InstrumentShort,
InstrumentsRequest,
InstrumentStatus,
LastPrice,
LastPriceInstrument,
LastPriceSubscription,
MarketDataRequest,
MarketDataResponse,
MarketDataServerSideStreamRequest,
MoneyValue,
OpenSandboxAccountRequest,
OpenSandboxAccountResponse,
Operation,
OperationsRequest,
OperationsResponse,
OperationState,
OperationTrade,
OperationType,
Order,
OrderBook,
OrderBookInstrument,
OrderBookSubscription,
OrderDirection,
OrderExecutionReportStatus,
OrderStage,
OrderState,
OrderTrade,
OrderTrades,
OrderType,
PortfolioPosition,
PortfolioRequest,
PortfolioResponse,
PositionsRequest,
PositionsResponse,
PositionsSecurities,
PostOrderRequest,
PostOrderResponse,
PostStopOrderRequest,
PostStopOrderResponse,
Quotation,
RealExchange,
SandboxPayInRequest,
SandboxPayInResponse,
SecurityTradingStatus,
Share,
ShareResponse,
SharesResponse,
ShareType,
StopOrder,
StopOrderDirection,
StopOrderExpirationType,
StopOrderType,
StreamLimit,
StructuredProductType,
SubscribeCandlesRequest,
SubscribeCandlesResponse,
SubscribeInfoRequest,
SubscribeInfoResponse,
SubscribeLastPriceRequest,
SubscribeLastPriceResponse,
SubscribeOrderBookRequest,
SubscribeOrderBookResponse,
SubscribeTradesRequest,
SubscribeTradesResponse,
SubscriptionAction,
SubscriptionInterval,
SubscriptionStatus,
Trade,
TradeDirection,
TradeInstrument,
TradesStreamRequest,
TradesStreamResponse,
TradeSubscription,
TradingDay,
TradingSchedule,
TradingSchedulesRequest,
TradingSchedulesResponse,
TradingStatus,
UnaryLimit,
WithdrawLimitsRequest,
WithdrawLimitsResponse,
)
# Public API of the package: every name re-exported from .clients,
# .exceptions, .logging and .schemas (appears to be kept sorted
# case-insensitively).
__all__ = (
    "AccessLevel",
    "Account",
    "AccountStatus",
    "AccountType",
    "AccruedInterest",
    "AioRequestError",
    "Asset",
    "AssetBond",
    "AssetClearingCertificate",
    "AssetCurrency",
    "AssetEtf",
    "AssetFull",
    "AssetInstrument",
    "AssetRequest",
    "AssetResponse",
    "AssetSecurity",
    "AssetShare",
    "AssetsRequest",
    "AssetsResponse",
    "AssetStructuredProduct",
    "AssetType",
    "AsyncClient",
    "Bond",
    "BondResponse",
    "BondsResponse",
    "Brand",
    "BrokerReportRequest",
    "BrokerReportResponse",
    "CancelOrderRequest",
    "CancelOrderResponse",
    "CancelStopOrderRequest",
    "CancelStopOrderResponse",
    "Candle",
    "CandleInstrument",
    "CandleInterval",
    "CandleSubscription",
    "Client",
    "CloseSandboxAccountRequest",
    "CloseSandboxAccountResponse",
    "CountryResponse",
    "Coupon",
    "CouponType",
    "CurrenciesResponse",
    "Currency",
    "CurrencyResponse",
    "Dividend",
    "DividendsForeignIssuerReport",
    "EditFavoritesActionType",
    "EditFavoritesRequest",
    "EditFavoritesRequestInstrument",
    "EditFavoritesResponse",
    "Etf",
    "EtfResponse",
    "EtfsResponse",
    "FavoriteInstrument",
    "FindInstrumentRequest",
    "FindInstrumentResponse",
    "Future",
    "FutureResponse",
    "FuturesResponse",
    "GenerateBrokerReportRequest",
    "GenerateDividendsForeignIssuerReportRequest",
    "GenerateDividendsForeignIssuerReportResponse",
    "get_current_tracking_id",
    "GetAccountsRequest",
    "GetAccountsResponse",
    "GetAccruedInterestsRequest",
    "GetAccruedInterestsResponse",
    "GetBondCouponsRequest",
    "GetBondCouponsResponse",
    "GetBrandRequest",
    "GetBrandsRequest",
    "GetBrandsResponse",
    "GetBrokerReportRequest",
    "GetCandlesRequest",
    "GetCandlesResponse",
    "GetCountriesRequest",
    "GetCountriesResponse",
    "GetDividendsForeignIssuerReportRequest",
    "GetDividendsForeignIssuerReportResponse",
    "GetDividendsForeignIssuerRequest",
    "GetDividendsForeignIssuerResponse",
    "GetDividendsRequest",
    "GetDividendsResponse",
    "GetFavoritesRequest",
    "GetFavoritesResponse",
    "GetFuturesMarginRequest",
    "GetFuturesMarginResponse",
    "GetInfoRequest",
    "GetInfoResponse",
    "GetLastPricesRequest",
    "GetLastPricesResponse",
    "GetLastTradesRequest",
    "GetLastTradesResponse",
    "GetMarginAttributesRequest",
    "GetMarginAttributesResponse",
    "GetOrderBookRequest",
    "GetOrderBookResponse",
    "GetOrdersRequest",
    "GetOrdersResponse",
    "GetOrderStateRequest",
    "GetStopOrdersRequest",
    "GetStopOrdersResponse",
    "GetTradingStatusRequest",
    "GetTradingStatusResponse",
    "GetUserTariffRequest",
    "GetUserTariffResponse",
    "HistoricCandle",
    "InfoInstrument",
    "InfoSubscription",
    "Instrument",
    "InstrumentIdType",
    "InstrumentLink",
    "InstrumentRequest",
    "InstrumentResponse",
    "InstrumentShort",
    "InstrumentsRequest",
    "InstrumentStatus",
    "InvestError",
    "LastPrice",
    "LastPriceInstrument",
    "LastPriceSubscription",
    "MarketDataRequest",
    "MarketDataResponse",
    "MarketDataServerSideStreamRequest",
    "MoneyValue",
    "OpenSandboxAccountRequest",
    "OpenSandboxAccountResponse",
    "Operation",
    "OperationsRequest",
    "OperationsResponse",
    "OperationState",
    "OperationTrade",
    "OperationType",
    "Order",
    "OrderBook",
    "OrderBookInstrument",
    "OrderBookSubscription",
    "OrderDirection",
    "OrderExecutionReportStatus",
    "OrderStage",
    "OrderState",
    "OrderTrade",
    "OrderTrades",
    "OrderType",
    "PortfolioPosition",
    "PortfolioRequest",
    "PortfolioResponse",
    "PositionsRequest",
    "PositionsResponse",
    "PositionsSecurities",
    "PostOrderRequest",
    "PostOrderResponse",
    "PostStopOrderRequest",
    "PostStopOrderResponse",
    "Quotation",
    "RealExchange",
    "RequestError",
    "SandboxPayInRequest",
    "SandboxPayInResponse",
    "SecurityTradingStatus",
    "Share",
    "ShareResponse",
    "SharesResponse",
    "ShareType",
    "StopOrder",
    "StopOrderDirection",
    "StopOrderExpirationType",
    "StopOrderType",
    "StreamLimit",
    "StructuredProductType",
    "SubscribeCandlesRequest",
    "SubscribeCandlesResponse",
    "SubscribeInfoRequest",
    "SubscribeInfoResponse",
    "SubscribeLastPriceRequest",
    "SubscribeLastPriceResponse",
    "SubscribeOrderBookRequest",
    "SubscribeOrderBookResponse",
    "SubscribeTradesRequest",
    "SubscribeTradesResponse",
    "SubscriptionAction",
    "SubscriptionInterval",
    "SubscriptionStatus",
    "Trade",
    "TradeDirection",
    "TradeInstrument",
    "TradesStreamRequest",
    "TradesStreamResponse",
    "TradeSubscription",
    "TradingDay",
    "TradingSchedule",
    "TradingSchedulesRequest",
    "TradingSchedulesResponse",
    "TradingStatus",
    "UnaryLimit",
    "WithdrawLimitsRequest",
    "WithdrawLimitsResponse",
)
| 24.393484
| 66
| 0.715093
| 415
| 9,733
| 16.746988
| 0.501205
| 0.002878
| 0.00518
| 0.005755
| 0.96777
| 0.951367
| 0.951367
| 0.951367
| 0.951367
| 0.951367
| 0
| 0
| 0.202918
| 9,733
| 398
| 67
| 24.454774
| 0.895849
| 0
| 0
| 0
| 0
| 0
| 0.35354
| 0.150005
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010076
| 0
| 0.010076
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c33825279307c0f5f6d1214ecf2f950fd880ebae
| 14,635
|
py
|
Python
|
tests/sci/test_sci_units.py
|
KarrLab/schema
|
1f192754e0ee32b07bde9bcf8a21723d4b393f9e
|
[
"MIT"
] | 7
|
2020-08-07T17:24:29.000Z
|
2021-05-20T04:45:14.000Z
|
tests/sci/test_sci_units.py
|
KarrLab/schema
|
1f192754e0ee32b07bde9bcf8a21723d4b393f9e
|
[
"MIT"
] | 92
|
2019-09-18T20:52:31.000Z
|
2020-12-10T17:46:52.000Z
|
tests/sci/test_sci_units.py
|
KarrLab/obj_model
|
1f192754e0ee32b07bde9bcf8a21723d4b393f9e
|
[
"MIT"
] | 2
|
2020-10-05T21:02:09.000Z
|
2021-06-14T03:08:06.000Z
|
""" Test unit attributes
:Author: Jonathan Karr <karr@mssm.edu>
:Date: 2019-01-20
:Copyright: 2019, Karr Lab
:License: MIT
"""
from obj_tables import core
from obj_tables.sci import units
from wc_utils.util.units import unit_registry
import pint
import unittest
class UnitAttributeTestCase(unittest.TestCase):
    """ Tests for :obj:`units.UnitAttribute` (unit-valued model attributes backed by a pint registry) """

    def test_init(self):
        """ Constructor accepts registry-derived defaults/choices and rejects non-unit values """
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry,
                                   choices=[registry.parse_units('s'), registry.parse_units('g')],
                                   default=registry.parse_units('s'),
                                   default_cleaned_value=registry.parse_units('g'))
        self.assertEqual(len(attr.choices), 2)

        attr = units.UnitAttribute(registry)
        self.assertEqual(attr.choices, None)

        # every constructor argument must be an instance of the registry's unit class
        with self.assertRaisesRegex(ValueError, '`registry` must be an instance of'):
            units.UnitAttribute('registry')
        with self.assertRaisesRegex(ValueError, '`default` must be an instance of'):
            units.UnitAttribute(registry, default='s')
        with self.assertRaisesRegex(ValueError, '`default_cleaned_value` must be an instance of'):
            units.UnitAttribute(registry, default_cleaned_value='g')
        with self.assertRaisesRegex(ValueError, 'choices must be instances of'):
            units.UnitAttribute(registry, choices=['g'])

    def test_get_default(self):
        """ ``get_default`` returns the configured default unit """
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry, default=registry.parse_units('s'))
        self.assertEqual(attr.get_default(), registry.parse_units('s'))

    def test_get_default_cleaned_value(self):
        """ ``get_default_cleaned_value`` returns the configured cleaned default """
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry, default_cleaned_value=registry.parse_units('g'))
        self.assertEqual(attr.get_default_cleaned_value(), registry.parse_units('g'))

    def test_value_equal(self):
        """ ``value_equal`` compares units dimensionally, but only within the same registry """
        registry1 = unit_registry
        registry2 = pint.UnitRegistry()
        attr = units.UnitAttribute(unit_registry)

        self.assertTrue(attr.value_equal(registry1.parse_units('g'), registry1.parse_units('g')))
        # identical units from different registries are not considered equal
        self.assertFalse(attr.value_equal(registry1.parse_units('g'), registry2.parse_units('g')))
        self.assertTrue(attr.value_equal(registry1.parse_units('g'), registry1.parse_units('g / l * l')))
        self.assertFalse(attr.value_equal(registry1.parse_units('g'), registry2.parse_units('g / l * l')))
        self.assertTrue(attr.value_equal(registry1.parse_units('M'), registry1.parse_units('mol / l')))
        self.assertTrue(attr.value_equal(None, None))
        self.assertFalse(attr.value_equal(None, registry1.parse_units('mol / l')))
        self.assertFalse(attr.value_equal('g', registry1.parse_units('mol / l')))
        self.assertFalse(attr.value_equal(registry1.parse_units('mol / l'), None))
        self.assertFalse(attr.value_equal(registry1.parse_units('g'), registry1.parse_units('l')))
        # different magnitude prefixes are different units
        self.assertFalse(attr.value_equal(registry1.parse_units('ag'), registry1.parse_units('g')))
        self.assertTrue(attr.value_equal(registry1.parse_units('ml'), registry1.parse_units('cm^3')))

    def test_clean(self):
        """ ``clean`` parses strings to units, applies the cleaned default to empty input,
        and reports errors (second tuple element) for unparseable values """
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry, default_cleaned_value=registry.parse_units('g'))
        self.assertEqual(attr.clean(''), (registry.parse_units('g'), None))
        self.assertEqual(attr.clean(None), (registry.parse_units('g'), None))
        self.assertEqual(attr.clean('s'), (registry.parse_units('s'), None))
        self.assertEqual(attr.clean('dimensionless'), (registry.parse_units('dimensionless'), None))
        self.assertNotEqual(attr.clean('dimensionless')[0], None)
        self.assertEqual(attr.clean(registry.parse_units('s')), (registry.parse_units('s'), None))
        self.assertNotEqual(attr.clean(1.)[1], None)
        self.assertNotEqual(attr.clean('not_a_unit')[1], None)

        attr = units.UnitAttribute(registry, default_cleaned_value=None)
        self.assertEqual(attr.clean(''), (None, None))
        self.assertEqual(attr.clean(None), (None, None))
        self.assertEqual(attr.clean('s'), (registry.parse_units('s'), None))

    def test_validate(self):
        """ ``validate`` returns ``None`` for valid values and an error object otherwise """
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry, choices=[registry.parse_units('s')])
        self.assertNotEqual(attr.validate(None, ''), None)
        self.assertEqual(attr.validate(None, None), None)
        self.assertEqual(attr.validate(None, registry.parse_units('s')), None)
        self.assertNotEqual(attr.validate(None, registry.parse_units('g')), None)
        self.assertNotEqual(attr.validate(None, 's'), None)
        self.assertNotEqual(attr.validate(None, 2.), None)

        attr = units.UnitAttribute(registry)
        self.assertEqual(attr.validate(None, None), None)
        self.assertEqual(attr.validate(None, registry.parse_units('s')), None)
        self.assertEqual(attr.validate(None, registry.parse_units('g')), None)

        # with none=False, None is no longer a valid value
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry, none=False)
        self.assertNotEqual(attr.validate(None, None), None)

    def test_copy_value(self):
        """ ``copy_value`` returns the same unit object (units are immutable) """
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry)
        unit = registry.parse_units('s')
        self.assertIs(attr.copy_value(unit, {}), unit)

    def test_serialize(self):
        """ ``serialize`` renders units as their long names; empty values become '' """
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry)
        self.assertEqual(attr.serialize(''), '')
        self.assertEqual(attr.serialize(None), '')
        self.assertEqual(attr.serialize(registry.parse_units('s')), 'second')
        self.assertEqual(attr.serialize(registry.parse_units('dimensionless')), 'dimensionless')

    def test_to_builtin(self):
        """ ``to_builtin`` converts units to plain strings for JSON-style export """
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry)
        self.assertEqual(attr.to_builtin(''), None)
        self.assertEqual(attr.to_builtin(None), None)
        self.assertEqual(attr.to_builtin(registry.parse_units('s')), 'second')

    def test_from_builtin(self):
        """ ``from_builtin`` parses exported strings back into registry units """
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry)
        self.assertEqual(attr.from_builtin(''), None)
        self.assertEqual(attr.from_builtin(None), None)
        self.assertEqual(attr.from_builtin('s'), registry.parse_units('s'))

    def test_get_obj_units(self):
        """ ``get_obj_units`` collects the units held by all unit attributes of a model instance """
        registry = pint.UnitRegistry()

        class TestModel(core.Model):
            registry = pint.UnitRegistry()
            str_attr = core.StringAttribute()
            unit_attr_1 = units.UnitAttribute(registry)
            unit_attr_2 = units.UnitAttribute(registry)

        units_g = registry.parse_units('g')
        units_l = registry.parse_units('l')
        model = TestModel(str_attr='s',
                          unit_attr_1=units_g,
                          unit_attr_2=units_l)
        self.assertEqual(set(units.get_obj_units(model)), set([units_g, units_l]))

    def test_get_xlsx_validation(self):
        """ ``get_xlsx_validation`` runs without error for all none/unique/choices combinations """
        registry = pint.UnitRegistry()
        attr = units.UnitAttribute(registry, none=True, unique=True,
                                   default_cleaned_value=registry.parse_units('g'), choices=[registry.parse_units('g')])
        attr.get_xlsx_validation()

        attr = units.UnitAttribute(registry, none=False, unique=True,
                                   default_cleaned_value=registry.parse_units('g'), choices=[registry.parse_units('g')])
        attr.get_xlsx_validation()

        attr = units.UnitAttribute(registry, none=True, unique=True)
        attr.get_xlsx_validation()

        attr = units.UnitAttribute(registry, none=False, unique=False)
        attr.get_xlsx_validation()
class QuantityAttributeTestCase(unittest.TestCase):
    """ Tests for :obj:`units.QuantityAttribute` (quantity-valued model attributes backed by a pint registry) """

    def test_init(self):
        """ Constructor accepts quantity defaults and unit choices, and rejects plain strings """
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry,
                                       choices=[registry.parse_units('s'), registry.parse_units('g')],
                                       default=registry.parse_expression('4 s'),
                                       default_cleaned_value=registry.parse_expression('3 g'))
        self.assertEqual(len(attr.choices), 2)

        attr = units.QuantityAttribute(registry)
        self.assertEqual(attr.choices, None)

        # every constructor argument must be validated against the registry's classes
        with self.assertRaisesRegex(ValueError, '`registry` must be an instance of'):
            units.QuantityAttribute('registry')
        with self.assertRaisesRegex(ValueError, '`default` must be an instance of'):
            units.QuantityAttribute(registry, default='2 g')
        with self.assertRaisesRegex(ValueError, '`default_cleaned_value` must be an instance of'):
            units.QuantityAttribute(registry, default_cleaned_value='3 m')
        with self.assertRaisesRegex(ValueError, 'choices must be instances of'):
            units.QuantityAttribute(registry, choices=['g'])

    def test_get_default(self):
        """ ``get_default`` returns the configured default quantity """
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry, default=registry.parse_expression('2 s'))
        self.assertEqual(attr.get_default(), registry.Quantity(2, registry.parse_units('s')))

    def test_get_default_cleaned_value(self):
        """ ``get_default_cleaned_value`` returns the configured cleaned default quantity """
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry, default_cleaned_value=registry.parse_expression('3 g'))
        self.assertEqual(attr.get_default_cleaned_value(), registry.parse_expression('3 g'))

    def test_value_equal(self):
        """ ``value_equal`` compares quantities dimensionally, but only within the same registry """
        registry1 = unit_registry
        registry2 = pint.UnitRegistry()
        attr = units.QuantityAttribute(unit_registry)

        self.assertTrue(attr.value_equal(registry1.parse_expression('2 g'), registry1.parse_expression('2 g')))
        # identical quantities from different registries are not considered equal
        self.assertFalse(attr.value_equal(registry1.parse_expression('2 g'), registry2.parse_expression('2 g')))
        self.assertTrue(attr.value_equal(registry1.parse_expression('3 M'), registry1.parse_expression('3 mol / l')))
        self.assertTrue(attr.value_equal(None, None))
        self.assertFalse(attr.value_equal(None, registry1.parse_expression('2 mol / l')))
        self.assertFalse(attr.value_equal('2 g', registry1.parse_expression('2 g')))
        self.assertFalse(attr.value_equal(registry1.parse_expression('2 mol / l'), None))
        self.assertFalse(attr.value_equal(registry1.parse_expression('2 g'), registry1.parse_expression('2 l')))
        self.assertFalse(attr.value_equal(registry1.parse_expression('2 mg'), registry1.parse_expression('2 g')))
        # equality is by physical magnitude, not textual form
        self.assertTrue(attr.value_equal(registry1.parse_expression('2000 mg'), registry1.parse_expression('2 g')))
        self.assertTrue(attr.value_equal(registry1.parse_expression('2 ml'), registry1.parse_expression('2 cm^3')))

    def test_clean(self):
        """ ``clean`` parses strings to quantities, applies the cleaned default to empty input,
        and reports errors (second tuple element) for unparseable values """
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry, default_cleaned_value=registry.parse_expression('g'))
        self.assertEqual(attr.clean(''), (registry.parse_expression('g'), None))
        self.assertEqual(attr.clean(None), (registry.parse_expression('g'), None))
        self.assertEqual(attr.clean('s'), (registry.parse_expression('s'), None))
        self.assertEqual(attr.clean('dimensionless'), (registry.parse_expression('dimensionless'), None))
        self.assertNotEqual(attr.clean('dimensionless')[0], None)
        self.assertEqual(attr.clean(registry.parse_expression('s')), (registry.parse_expression('s'), None))
        self.assertNotEqual(attr.clean(1.)[1], None)
        self.assertNotEqual(attr.clean('not_a_unit')[1], None)

        attr = units.QuantityAttribute(registry, default_cleaned_value=None)
        self.assertEqual(attr.clean(''), (None, None))
        self.assertEqual(attr.clean(None), (None, None))
        self.assertEqual(attr.clean('s'), (registry.parse_expression('s'), None))

    def test_validate(self):
        """ ``validate`` returns ``None`` for valid values and an error object otherwise """
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry, choices=[registry.parse_units('s')])
        self.assertNotEqual(attr.validate(None, ''), None)
        self.assertEqual(attr.validate(None, None), None)
        self.assertEqual(attr.validate(None, registry.parse_expression('s')), None)
        self.assertNotEqual(attr.validate(None, registry.parse_expression('g')), None)
        self.assertNotEqual(attr.validate(None, 's'), None)
        self.assertNotEqual(attr.validate(None, 2.), None)

        attr = units.QuantityAttribute(registry)
        self.assertEqual(attr.validate(None, None), None)
        self.assertEqual(attr.validate(None, registry.parse_expression('s')), None)
        self.assertEqual(attr.validate(None, registry.parse_expression('g')), None)

        # with none=False, None is no longer a valid value
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry, none=False)
        self.assertNotEqual(attr.validate(None, None), None)

    def test_copy_value(self):
        """ ``copy_value`` yields a quantity equal to the original """
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry)
        unit = registry.parse_expression('s')
        self.assertTrue(attr.value_equal(attr.copy_value(unit, {}), unit))

    def test_serialize(self):
        """ ``serialize`` renders quantities as 'magnitude unit' strings; empty values become '' """
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry)
        self.assertEqual(attr.serialize(''), '')
        self.assertEqual(attr.serialize(None), '')
        self.assertEqual(attr.serialize(registry.parse_expression('s')), '1 second')
        self.assertEqual(attr.serialize(registry.parse_expression('dimensionless')), '1 dimensionless')

    def test_to_builtin(self):
        """ ``to_builtin`` converts quantities to a magnitude/units dict for JSON-style export """
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry)
        self.assertEqual(attr.to_builtin(''), None)
        self.assertEqual(attr.to_builtin(None), None)
        self.assertEqual(attr.to_builtin(registry.parse_expression('s')), {
            'magnitude': 1.0,
            'units': 'second',
        })

    def test_from_builtin(self):
        """ ``from_builtin`` reconstructs a quantity from the exported dict """
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry)
        self.assertEqual(attr.from_builtin(''), None)
        self.assertEqual(attr.from_builtin(None), None)
        self.assertEqual(attr.from_builtin({'magnitude': 1, 'units': 's'}), registry.parse_expression('s'))

    def test_get_xlsx_validation(self):
        """ ``get_xlsx_validation`` runs without error for none/unique combinations """
        registry = pint.UnitRegistry()
        attr = units.QuantityAttribute(registry, none=True, unique=True, default_cleaned_value=registry.parse_expression('1 g'))
        attr.get_xlsx_validation()

        attr = units.QuantityAttribute(registry, none=False, unique=False)
        attr.get_xlsx_validation()
| 48.946488
| 128
| 0.673522
| 1,668
| 14,635
| 5.754796
| 0.06235
| 0.079904
| 0.102927
| 0.076675
| 0.907386
| 0.891968
| 0.883009
| 0.838525
| 0.807792
| 0.704136
| 0
| 0.009052
| 0.192347
| 14,635
| 298
| 129
| 49.110738
| 0.803046
| 0.008131
| 0
| 0.564103
| 0
| 0
| 0.050524
| 0.003171
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.098291
| false
| 0
| 0.021368
| 0
| 0.132479
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c33932b2aeaa47bf4fd9e2581d6e4457882aa2a5
| 37,537
|
py
|
Python
|
test/integration/test_catalog_management_v1.py
|
zachsirotto/platform-services-python-sdk
|
32a080b7a93567f9528867a31bd0b47423297bab
|
[
"Apache-2.0"
] | null | null | null |
test/integration/test_catalog_management_v1.py
|
zachsirotto/platform-services-python-sdk
|
32a080b7a93567f9528867a31bd0b47423297bab
|
[
"Apache-2.0"
] | null | null | null |
test/integration/test_catalog_management_v1.py
|
zachsirotto/platform-services-python-sdk
|
32a080b7a93567f9528867a31bd0b47423297bab
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# (C) Copyright IBM Corp. 2020.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Test the platform service Catalog Management API operations
'''
import unittest
import os
from ibm_cloud_sdk_core import *
from ibm_platform_services.catalog_management_v1 import *
import pytest
from dotenv import load_dotenv
import time
# Integration-test configuration and expected values.
configFile = 'catalog_mgmt.env'  # dotenv file holding service credentials
configLoaded = None  # set to True once the env file has been loaded
# timestamp makes the catalog label unique per run so parallel/stale runs don't collide
timestamp = int(time.time())
expectedAccount = '67d27f28d43948b2b3bda9138f251a13'  # test account id the service should report
expectedLabel = 'integration-test-{}'.format(timestamp)
expectedShortDesc = 'test'
# URL templates returned by the test (v1-beta) catalog service; formatted with catalog/offering ids
expectedURL = 'https://cm.globalcatalog.test.cloud.ibm.com/api/v1-beta/catalogs/{}'
expectedOfferingsURL = 'https://cm.globalcatalog.test.cloud.ibm.com/api/v1-beta/catalogs/{}/offerings'
fakeName = 'bogus'  # nonexistent identifier used by the negative tests
fakeVersionLocator = 'bogus.bogus'
expectedOfferingName = "test-offering"
expectedOfferingURL = "https://cm.globalcatalog.test.cloud.ibm.com/api/v1-beta/catalogs/{}/offerings/{}"
if os.path.exists(configFile):
    load_dotenv(dotenv_path=configFile)
    configLoaded = True
else:
    print('External configuration was not found, skipping tests...')
class TestCatalogManagementV1(unittest.TestCase):
"""
    Integration Test Class for CatalogManagementV1
"""
@classmethod
def setUpClass(self):
if not configLoaded:
raise unittest.SkipTest('External configuration not available, skipping...')
self.service = CatalogManagementV1.new_instance()
self.config = read_external_sources(CatalogManagementV1.DEFAULT_SERVICE_NAME)
assert self.config is not None
self.gitToken = self.config.get('GIT_TOKEN')
assert self.gitToken is not None
def setUp(self):
result = self.service.list_catalogs().get_result()
if result is not None:
resources = result.get('resources')
for resource in resources:
if resource.get('label') == expectedLabel:
self.service.delete_catalog(catalog_identifier=resource.get('id'))
def tearDown(self):
result = self.service.list_catalogs().get_result()
if result is not None:
resources = result.get('resources')
for resource in resources:
if resource.get('label') == expectedLabel:
self.service.delete_catalog(catalog_identifier=resource.get('id'))
    def test_get_catalog_account(self):
        """The account record should match the test account and carry the permissive default filters."""
        response = self.service.get_catalog_account()
        assert response is not None
        assert response.get_status_code() == 200
        result = response.get_result()
        assert result.get('id') == expectedAccount
        # default account filters: include everything, no category/id restrictions
        assert result.get('account_filters').get('include_all') is True
        assert result.get('account_filters').get('category_filters') is None
        assert result.get('account_filters').get('id_filters').get('include') is None
        assert result.get('account_filters').get('id_filters').get('exclude') is None
    def test_get_catalog_account_filters(self):
        """The filters endpoint should return the same permissive defaults, wrapped in a list."""
        response = self.service.get_catalog_account_filters()
        assert response is not None
        assert response.get_status_code() == 200
        result = response.get_result()
        # note: here 'account_filters' is a list; element 0 holds the defaults
        assert result.get('account_filters')[0].get('include_all') is True
        assert result.get('account_filters')[0].get('category_filters') is None
        assert result.get('account_filters')[0].get('id_filters') .get('include') is None
        assert result.get('account_filters')[0].get('id_filters') .get('exclude') is None
    def test_list_catalogs(self):
        """Create a catalog, list catalogs, and verify exactly one entry with our label exists
        and that its fields (urls, account, filters) match expectations."""
        catalogCount = 0
        catalogIndex = -1
        createResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        createResult = createResponse.get_result()
        listResponse = self.service.list_catalogs()
        # delete immediately; the listing captured above is what we assert on
        self.service.delete_catalog(catalog_identifier=createResult.get('id'))
        if listResponse.get_result() is not None:
            for i, resource in enumerate(listResponse.get_result().get('resources')):
                if resource.get('label') == expectedLabel:
                    catalogCount = catalogCount + 1
                    catalogIndex = i
        assert listResponse is not None
        assert listResponse.get_status_code() == 200
        listResult = listResponse.get_result()
        assert listResult.get('offset') == 0
        assert listResult.get('limit') == 0
        # exactly one catalog should carry this run's unique label
        assert catalogCount == 1
        assert listResult.get('last') is None
        assert listResult.get('prev') is None
        assert listResult.get('next') is None
        resources = listResult.get('resources')
        assert resources is not None
        assert resources[catalogIndex].get('label') == expectedLabel
        assert resources[catalogIndex].get('short_description') == expectedShortDesc
        assert resources[catalogIndex].get('url') == expectedURL.format(createResult.get('id'))
        assert resources[catalogIndex].get('offerings_url') == expectedOfferingsURL.format(createResult.get('id'))
        assert resources[catalogIndex].get('owning_account') == expectedAccount
        # a newly created catalog starts with restrictive (include_all False) filters
        assert resources[catalogIndex].get('catalog_filters').get('include_all') is False
        assert resources[catalogIndex].get('catalog_filters').get('category_filters') is None
        assert resources[catalogIndex].get('catalog_filters').get('id_filters').get('include') is None
        assert resources[catalogIndex].get('catalog_filters').get('id_filters').get('exclude') is None
    def test_create_catalog(self):
        """Creating a catalog should return 201 and echo back label, urls, account and default filters."""
        response = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        assert response is not None
        assert response.get_status_code() == 201
        result = response.get_result()
        # clean up right away; assertions run against the captured create result
        self.service.delete_catalog(catalog_identifier=result.get('id'))
        assert result.get('label') == expectedLabel
        assert result.get('short_description') == expectedShortDesc
        assert result.get('url') == expectedURL.format(result.get('id'))
        assert result.get('offerings_url') == expectedOfferingsURL.format(result.get('id'))
        assert result.get('owning_account') == expectedAccount
        assert result.get('catalog_filters').get('include_all') is False
        assert result.get('catalog_filters').get('category_filters') is None
        assert result.get('catalog_filters').get('id_filters').get('include') is None
        assert result.get('catalog_filters').get('id_filters').get('exclude') is None
    def test_get_catalog(self):
        """Fetching a just-created catalog by id should return 200 with matching fields."""
        createResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        createResult = createResponse.get_result()
        getResponse = self.service.get_catalog(catalog_identifier=createResult.get('id'))
        getResult = getResponse.get_result()
        assert getResponse is not None
        assert getResponse.get_status_code() == 200
        # clean up before the field assertions so a failure doesn't leak the catalog
        self.service.delete_catalog(catalog_identifier=createResult.get('id'))
        assert getResult.get('label') == expectedLabel
        assert getResult.get('short_description') == expectedShortDesc
        assert getResult.get('url') == expectedURL.format(getResult.get('id'))
        assert getResult.get('offerings_url') == expectedOfferingsURL.format(getResult.get('id'))
        assert getResult.get('owning_account') == expectedAccount
        assert getResult.get('catalog_filters').get('include_all') is False
        assert getResult.get('catalog_filters').get('category_filters') is None
        assert getResult.get('catalog_filters').get('id_filters').get('include') is None
        assert getResult.get('catalog_filters').get('id_filters').get('exclude') is None
def test_get_catalog_failure(self):
with pytest.raises(ApiException) as e:
self.service.get_catalog(catalog_identifier=fakeName)
assert e.value.code == 404
    def test_update_catalog(self):
        """Replacing a catalog should update label/description and reset filters to permissive."""
        expectedLabelUpdated = "test2"
        expectedShortDescUpdated = "integration-test-update"
        createResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        createResult = createResponse.get_result()
        updateResponse = self.service.replace_catalog(catalog_identifier=createResult.get('id'), id=createResult.get('id'), label=expectedLabelUpdated, short_description=expectedShortDescUpdated)
        updateResult = updateResponse.get_result()
        self.service.delete_catalog(catalog_identifier=createResult.get('id'))
        assert updateResponse is not None
        assert updateResponse.get_status_code() == 200
        assert updateResult.get('label') == expectedLabelUpdated
        assert updateResult.get('short_description') == expectedShortDescUpdated
        # NOTE(review): url/offerings_url/owning_account assertions are disabled —
        # presumably the replace response omits or alters them; confirm before re-enabling.
        # assert updateResult.get('url') == expectedURL.format(createResult.get('id'))
        # assert updateResult.get('offerings_url') == expectedOfferingsURL.format(createResult.get('id'))
        # assert updateResult.get('owning_account') == expectedAccount
        # after replace, filters flip to include_all True (unlike a fresh create)
        assert updateResult.get('catalog_filters').get('include_all') is True
        assert updateResult.get('catalog_filters').get('category_filters') is None
        assert updateResult.get('catalog_filters').get('id_filters').get('include') is None
        assert updateResult.get('catalog_filters').get('id_filters').get('exclude') is None
def test_update_catalog_failure(self):
with pytest.raises(ApiException) as e:
self.service.replace_catalog(catalog_identifier=fakeName, id=fakeName)
assert e.value.code == 404
def test_delete_catalog(self):
createResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
createResult = createResponse.get_result()
deleteResponse = self.service.delete_catalog(catalog_identifier=createResult.get('id'), id=createResult.get('id'))
assert deleteResponse is not None
assert deleteResponse.get_status_code() == 200
def test_delete_catalog_failure(self):
deleteResponse = self.service.delete_catalog(catalog_identifier=fakeName, id=fakeName)
assert deleteResponse is not None
assert deleteResponse.get_status_code() == 200
    def test_create_offering(self):
        """Creating an offering inside a new catalog should return 201 with matching name/url/label."""
        catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        catalogResult = catalogResponse.get_result()
        offeringResponse = self.service.create_offering(catalog_identifier=catalogResult.get('id'), name=expectedOfferingName, label=expectedLabel)
        offeringResult = offeringResponse.get_result()
        # deleting the catalog also removes its offerings
        self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
        assert offeringResponse is not None
        assert offeringResponse.get_status_code() == 201
        assert offeringResult.get('name') == expectedOfferingName
        assert offeringResult.get('url') == expectedOfferingURL.format(catalogResult.get('id'), offeringResult.get('id'))
        assert offeringResult.get('label') == expectedLabel
    def test_get_offering(self):
        """Fetching a just-created offering by id should return 200 with matching fields."""
        catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        catalogResult = catalogResponse.get_result()
        offeringResponse = self.service.create_offering(catalog_identifier=catalogResult.get('id'), name=expectedOfferingName, label=expectedLabel)
        offeringResult = offeringResponse.get_result()
        getResponse = self.service.get_offering(catalog_identifier=catalogResult.get('id'), offering_id=offeringResult.get('id'))
        getResult = getResponse.get_result()
        # deleting the catalog also removes the offering
        self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
        assert getResponse is not None
        assert getResponse.get_status_code() == 200
        assert getResult.get('name') == expectedOfferingName
        assert getResult.get('url') == expectedOfferingURL.format(catalogResult.get('id'), offeringResult.get('id'))
        assert getResult.get('label') == expectedLabel
    def test_get_offering_failure(self):
        """A bogus offering id yields 404 while the catalog exists, 403 once it is deleted."""
        createResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        createResult = createResponse.get_result()
        with pytest.raises(ApiException) as e:
            self.service.get_offering(catalog_identifier=createResult.get('id'), offering_id=fakeName)
            assert e.value.code == 404
        self.service.delete_catalog(catalog_identifier=createResult.get('id'))
        # after the catalog is gone, access is denied rather than not-found
        with pytest.raises(ApiException) as e:
            self.service.get_offering(catalog_identifier=createResult.get('id'), offering_id=fakeName)
            assert e.value.code == 403
    def test_list_offerings(self):
        """Listing offerings of a catalog with one offering should return that single entry
        with the expected paging fields and per-offering metadata."""
        expectedFirst = "/api/v1-beta/catalogs/{}/offerings?limit=100&sort=label"
        expectedLast = "/api/v1-beta/catalogs/{}/offerings?limit=100&sort=label"
        catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        catalogResult = catalogResponse.get_result()
        offeringResponse = self.service.create_offering(catalog_identifier=catalogResult.get('id'), name=expectedOfferingName, label=expectedLabel)
        offeringResult = offeringResponse.get_result()
        listResponse = self.service.list_offerings(catalog_identifier=catalogResult.get('id'))
        listResult = listResponse.get_result()
        self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
        assert listResponse is not None
        assert listResponse.get_status_code() == 200
        assert listResult.get('offset') == 0
        assert listResult.get('limit') == 100
        assert listResult.get('total_count') == 1
        assert listResult.get('resource_count') == 1
        # with a single page, first and last point at the same URL
        assert listResult.get('first') == expectedFirst.format(catalogResult.get('id'))
        assert listResult.get('last') == expectedLast.format(catalogResult.get('id'))
        resources = listResult.get('resources')
        assert resources is not None
        assert resources[0].get('id') == offeringResult.get('id')
        assert resources[0].get('url') == expectedOfferingURL.format(catalogResult.get('id'), offeringResult.get('id'))
        assert resources[0].get('label') == expectedLabel
        assert resources[0].get('name') == expectedOfferingName
        assert resources[0].get('catalog_id') == catalogResult.get('id')
        assert resources[0].get('catalog_name') == expectedLabel
    def test_delete_offering(self):
        """Deleting an existing offering should succeed with HTTP 200."""
        catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        catalogResult = catalogResponse.get_result()
        offeringResponse = self.service.create_offering(catalog_identifier=catalogResult.get('id'), name=expectedOfferingName, label=expectedLabel)
        offeringResult = offeringResponse.get_result()
        deleteResponse = self.service.delete_offering(catalog_identifier=catalogResult.get('id'), offering_id=offeringResult.get('id'))
        self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
        assert deleteResponse is not None
        assert deleteResponse.get_status_code() == 200
    def test_delete_offering_failure(self):
        """Deleting a bogus offering id is idempotent (200) while the catalog exists,
        but raises 403 once the catalog has been deleted."""
        catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        catalogResult = catalogResponse.get_result()
        deleteResponse = self.service.delete_offering(catalog_identifier=catalogResult.get('id'), offering_id=fakeName)
        self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
        assert deleteResponse is not None
        assert deleteResponse.get_status_code() == 200
        with pytest.raises(ApiException) as e:
            self.service.delete_offering(catalog_identifier=catalogResult.get('id'), offering_id=fakeName)
            assert e.value.code == 403
    def test_update_offering(self):
        """Replacing an offering (with its current _rev) should update label and description."""
        expectedLabelUpdate = "test-update"
        expectedShortDescUpdate = "test-desc-update"
        catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        catalogResult = catalogResponse.get_result()
        offeringResponse = self.service.create_offering(catalog_identifier=catalogResult.get('id'), name=expectedOfferingName, label=expectedLabel)
        offeringResult = offeringResponse.get_result()
        # replace requires the current revision ('_rev') for optimistic concurrency
        updateResponse = self.service.replace_offering(catalog_identifier=catalogResult.get('id'), offering_id=offeringResult.get('id'), id=offeringResult.get('id'), rev=offeringResult.get('_rev'), label=expectedLabelUpdate, short_description=expectedShortDescUpdate)
        updateResult = updateResponse.get_result()
        self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
        assert updateResponse is not None
        assert updateResponse.get_status_code() == 200
        assert updateResult.get('short_description') == expectedShortDescUpdate
        assert updateResult.get('url') == expectedOfferingURL.format(catalogResult.get('id'), offeringResult.get('id'))
        assert updateResult.get('label') == expectedLabelUpdate
    def test_update_offering_failure(self):
        """Replacing a bogus offering yields 404 while the catalog exists, 403 once it is deleted."""
        createResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        createResult = createResponse.get_result()
        with pytest.raises(ApiException) as e:
            self.service.replace_offering(catalog_identifier=createResult.get('id'), offering_id=fakeName, id=fakeName, rev=fakeName)
            assert e.value.code == 404
        self.service.delete_catalog(catalog_identifier=createResult.get('id'))
        with pytest.raises(ApiException) as e:
            self.service.replace_offering(catalog_identifier=createResult.get('id'), offering_id=fakeName, id=fakeName, rev=fakeName)
            assert e.value.code == 403
    def test_get_consumption_offerings(self):
        """The consumption offerings listing should be non-empty with sane paging fields."""
        getResponse = self.service.get_consumption_offerings()
        assert getResponse is not None
        assert getResponse.get_status_code() == 200
        getResult = getResponse.get_result()
        assert getResult.get('offset') == 0
        assert getResult.get('limit') > 0
        assert getResult.get('total_count') > 0
        # more results than one page: 'next' is populated, 'prev' is not on page one
        assert getResult.get('last') is not None
        assert getResult.get('prev') is None
        assert getResult.get('next') is not None
        assert getResult.get('resources') is not None
    def test_import_offering(self):
        """Importing the Jenkins operator CSV from GitHub should create an offering whose
        name, label, kind and version metadata match the operator manifest."""
        expectedOfferingZipURL = 'https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.4.0/jenkins-operator.v0.4.0.clusterserviceversion.yaml'
        expectedOfferingTargetKind = 'roks'
        expectedOfferingVersion = "0.4.0"
        expectedJenkinsOfferingName = 'jenkins-operator'
        expectedJenkinsOfferingLabel = 'Jenkins Operator'
        expectedJenkinsOfferingShortDesc = 'Kubernetes native operator which fully manages Jenkins on Openshift.'
        catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
        catalogResult = catalogResponse.get_result()
        # gitToken authenticates the fetch of the manifest from GitHub
        offeringResponse = self.service.import_offering(catalog_identifier=catalogResult.get('id'), zipurl=expectedOfferingZipURL, x_auth_token=self.gitToken)
        offeringResult = offeringResponse.get_result()
        self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
        assert offeringResponse is not None
        assert offeringResponse.get_status_code() == 201
        assert offeringResult.get('name') == expectedJenkinsOfferingName
        assert offeringResult.get('url') == expectedOfferingURL.format(catalogResult.get('id'), offeringResult.get('id'))
        assert offeringResult.get('label') == expectedJenkinsOfferingLabel
        assert offeringResult.get('short_description') == expectedJenkinsOfferingShortDesc
        assert offeringResult.get('catalog_name') == expectedLabel
        assert offeringResult.get('catalog_id') == catalogResult.get('id')
        assert offeringResult.get('kinds') is not None
        assert offeringResult.get('kinds')[0].get('target_kind') == expectedOfferingTargetKind
        assert offeringResult.get('kinds')[0].get('versions') is not None
        assert offeringResult.get('kinds')[0].get('versions')[0].get('version') == expectedOfferingVersion
        assert offeringResult.get('kinds')[0].get('versions')[0].get('tgz_url') == expectedOfferingZipURL
def test_import_offering_version(self):
    """Import an offering, then import a second version into it and verify both versions."""
    expectedOfferingZipURL = 'https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.3.31/jenkins-operator.v0.3.31.clusterserviceversion.yaml'
    expectedOfferingZipURLUpdate = "https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.4.0/jenkins-operator.v0.4.0.clusterserviceversion.yaml"
    expectedOfferingTargetKind = 'roks'
    expectedOfferingVersion = "0.3.31"
    expectedOfferingVersionUpdate = "0.4.0"
    expectedJenkinsOfferingName = 'jenkins-operator'
    expectedJenkinsOfferingLabel = 'Jenkins Operator'
    expectedJenkinsOfferingShortDesc = 'Kubernetes native operator which fully manages Jenkins on Openshift.'
    catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
    catalogResult = catalogResponse.get_result()
    # First import creates the offering with version 0.3.31.
    offeringResponse = self.service.import_offering(catalog_identifier=catalogResult.get('id'), zipurl=expectedOfferingZipURL, x_auth_token=self.gitToken)
    offeringResult = offeringResponse.get_result()
    # Second import adds version 0.4.0 to the same offering.
    versionResponse = self.service.import_offering_version(catalog_identifier=catalogResult.get('id'), offering_id=offeringResult.get('id'), zipurl=expectedOfferingZipURLUpdate, x_auth_token=self.gitToken)
    versionResult = versionResponse.get_result()
    # Clean up the catalog before asserting so a failed assertion cannot leak it.
    self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
    assert versionResponse is not None
    assert versionResponse.get_status_code() == 201
    assert versionResult.get('name') == expectedJenkinsOfferingName
    assert versionResult.get('url') == expectedOfferingURL.format(catalogResult.get('id'), offeringResult.get('id'))
    assert versionResult.get('label') == expectedJenkinsOfferingLabel
    assert versionResult.get('short_description') == expectedJenkinsOfferingShortDesc
    assert versionResult.get('catalog_name') == expectedLabel
    assert versionResult.get('catalog_id') == catalogResult.get('id')
    # Both versions should now be present, in import order.
    assert versionResult.get('kinds') is not None
    assert versionResult.get('kinds')[0].get('target_kind') == expectedOfferingTargetKind
    assert versionResult.get('kinds')[0].get('versions') is not None
    assert versionResult.get('kinds')[0].get('versions')[0].get('version') == expectedOfferingVersion
    assert versionResult.get('kinds')[0].get('versions')[0].get('tgz_url') == expectedOfferingZipURL
    assert versionResult.get('kinds')[0].get('versions')[1].get('version') == expectedOfferingVersionUpdate
    assert versionResult.get('kinds')[0].get('versions')[1].get('tgz_url') == expectedOfferingZipURLUpdate
def test_import_offering_version_failure(self):
    """import_offering_version: 404 for an unknown offering, 403 once the catalog is deleted."""
    expectedOfferingZipURL = 'https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.3.31/jenkins-operator.v0.3.31.clusterserviceversion.yaml'
    createResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
    createResult = createResponse.get_result()
    # Unknown offering id inside an existing catalog -> 404.
    with pytest.raises(ApiException) as e:
        self.service.import_offering_version(catalog_identifier=createResult.get('id'), offering_id=fakeName, zipurl=expectedOfferingZipURL, x_auth_token=self.gitToken)
    assert e.value.code == 404
    self.service.delete_catalog(catalog_identifier=createResult.get('id'))
    # Same request against the now-deleted catalog -> 403.
    with pytest.raises(ApiException) as e:
        self.service.import_offering_version(catalog_identifier=createResult.get('id'), offering_id=fakeName, zipurl=expectedOfferingZipURL, x_auth_token=self.gitToken)
    assert e.value.code == 403
def test_reload_offering(self):
    """Import an offering, then reload the same version and verify the metadata is unchanged."""
    expectedOfferingZipURL = "https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.4.0/jenkins-operator.v0.4.0.clusterserviceversion.yaml"
    expectedOfferingTargetKind = 'roks'
    expectedOfferingVersion = "0.4.0"
    expectedJenkinsOfferingName = 'jenkins-operator'
    expectedJenkinsOfferingLabel = 'Jenkins Operator'
    expectedJenkinsOfferingShortDesc = 'Kubernetes native operator which fully manages Jenkins on Openshift.'
    catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
    catalogResult = catalogResponse.get_result()
    offeringResponse = self.service.import_offering(catalog_identifier=catalogResult.get('id'), zipurl=expectedOfferingZipURL, x_auth_token=self.gitToken)
    offeringResult = offeringResponse.get_result()
    # Reload the already-imported 0.4.0 version from the same source.
    reloadResponse = self.service.reload_offering(catalog_identifier=catalogResult.get('id'), offering_id=offeringResult.get('id'), zipurl=expectedOfferingZipURL, target_version=expectedOfferingVersion, x_auth_token=self.gitToken)
    reloadResult = reloadResponse.get_result()
    # Clean up the catalog before asserting so a failed assertion cannot leak it.
    self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
    assert reloadResponse is not None
    assert reloadResponse.get_status_code() == 200
    assert reloadResult.get('name') == expectedJenkinsOfferingName
    assert reloadResult.get('url') == expectedOfferingURL.format(catalogResult.get('id'), offeringResult.get('id'))
    assert reloadResult.get('label') == expectedJenkinsOfferingLabel
    assert reloadResult.get('short_description') == expectedJenkinsOfferingShortDesc
    assert reloadResult.get('catalog_name') == expectedLabel
    assert reloadResult.get('catalog_id') == catalogResult.get('id')
    assert reloadResult.get('kinds') is not None
    assert reloadResult.get('kinds')[0].get('target_kind') == expectedOfferingTargetKind
    assert reloadResult.get('kinds')[0].get('versions') is not None
    assert reloadResult.get('kinds')[0].get('versions')[0].get('version') == expectedOfferingVersion
    assert reloadResult.get('kinds')[0].get('versions')[0].get('tgz_url') == expectedOfferingZipURL
def test_reload_offering_failure(self):
    """reload_offering: 404 for an unknown offering, 403 once the catalog is deleted."""
    expectedOfferingZipURL = 'https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.3.31/jenkins-operator.v0.3.31.clusterserviceversion.yaml'
    expectedOfferingVersion = "0.4.0"
    createResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
    createResult = createResponse.get_result()
    # Unknown offering id inside an existing catalog -> 404.
    with pytest.raises(ApiException) as e:
        self.service.reload_offering(catalog_identifier=createResult.get('id'), offering_id=fakeName, zipurl=expectedOfferingZipURL, target_version=expectedOfferingVersion)
    assert e.value.code == 404
    self.service.delete_catalog(catalog_identifier=createResult.get('id'))
    # Same request against the now-deleted catalog -> 403.
    with pytest.raises(ApiException) as e:
        self.service.reload_offering(catalog_identifier=createResult.get('id'), offering_id=fakeName, zipurl=expectedOfferingZipURL, target_version=expectedOfferingVersion)
    assert e.value.code == 403
def test_get_version(self):
    """Import an offering, fetch its first version by version locator, and verify it."""
    expectedOfferingZipURL = "https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.4.0/jenkins-operator.v0.4.0.clusterserviceversion.yaml"
    expectedOfferingTargetKind = 'roks'
    expectedOfferingVersion = "0.4.0"
    expectedJenkinsOfferingName = 'jenkins-operator'
    expectedJenkinsOfferingLabel = 'Jenkins Operator'
    expectedJenkinsOfferingShortDesc = 'Kubernetes native operator which fully manages Jenkins on Openshift.'
    catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
    catalogResult = catalogResponse.get_result()
    offeringResponse = self.service.import_offering(catalog_identifier=catalogResult.get('id'), zipurl=expectedOfferingZipURL, x_auth_token=self.gitToken)
    offeringResult = offeringResponse.get_result()
    versionLocator = offeringResult.get('kinds')[0].get('versions')[0].get('version_locator')
    getResponse = self.service.get_version(version_loc_id=versionLocator)
    getResult = getResponse.get_result()
    # Clean up the catalog before asserting so a failed assertion cannot leak it.
    self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
    assert getResponse is not None
    assert getResponse.get_status_code() == 200
    assert getResult.get('name') == expectedJenkinsOfferingName
    assert getResult.get('url') == expectedOfferingURL.format(catalogResult.get('id'), offeringResult.get('id'))
    assert getResult.get('label') == expectedJenkinsOfferingLabel
    assert getResult.get('short_description') == expectedJenkinsOfferingShortDesc
    assert getResult.get('catalog_name') == expectedLabel
    assert getResult.get('catalog_id') == catalogResult.get('id')
    kinds = getResult.get('kinds')
    assert kinds is not None
    assert kinds[0].get('target_kind') == expectedOfferingTargetKind
    versions = kinds[0].get('versions')
    assert versions is not None
    assert versions[0].get('version') == expectedOfferingVersion
    assert versions[0].get('tgz_url') == expectedOfferingZipURL
def test_get_version_failure(self):
    """get_version with an unknown version locator must fail with 404."""
    with pytest.raises(ApiException) as err:
        self.service.get_version(version_loc_id=fakeVersionLocator)
    assert err.value.code == 404
def test_delete_version(self):
    """Import an offering and delete its only version by version locator."""
    expectedOfferingZipURL = "https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.4.0/jenkins-operator.v0.4.0.clusterserviceversion.yaml"
    catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
    catalogResult = catalogResponse.get_result()
    offeringResponse = self.service.import_offering(catalog_identifier=catalogResult.get('id'), zipurl=expectedOfferingZipURL, x_auth_token=self.gitToken)
    offeringResult = offeringResponse.get_result()
    versionLocator = offeringResult.get('kinds')[0].get('versions')[0].get('version_locator')
    deleteResponse = self.service.delete_version(version_loc_id=versionLocator)
    # Clean up the catalog before asserting so a failed assertion cannot leak it.
    self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
    assert deleteResponse is not None
    assert deleteResponse.get_status_code() == 200
def test_delete_version_failure(self):
    """delete_version with an unknown version locator must fail with 404."""
    with pytest.raises(ApiException) as err:
        self.service.delete_version(version_loc_id=fakeVersionLocator)
    assert err.value.code == 404
def test_get_version_about(self):
    """Import an offering and fetch the 'about' text of its first version."""
    expectedOfferingZipURL = "https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.4.0/jenkins-operator.v0.4.0.clusterserviceversion.yaml"
    catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
    catalogResult = catalogResponse.get_result()
    offeringResponse = self.service.import_offering(catalog_identifier=catalogResult.get('id'), zipurl=expectedOfferingZipURL, x_auth_token=self.gitToken)
    offeringResult = offeringResponse.get_result()
    getResponse = self.service.get_version_about(version_loc_id=offeringResult.get('kinds')[0].get('versions')[0].get('version_locator'))
    getResult = getResponse.get_result()
    # Clean up the catalog before asserting so a failed assertion cannot leak it.
    self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
    assert getResponse is not None
    assert getResponse.get_status_code() == 200
    assert getResult is not None
def test_get_version_about_failure(self):
    """get_version_about with an unknown version locator must fail with 404."""
    with pytest.raises(ApiException) as err:
        self.service.get_version_about(version_loc_id=fakeVersionLocator)
    assert err.value.code == 404
def test_get_version_updates(self):
    """Import two versions of an offering and verify the older one reports the newer as an update."""
    expectedOfferingZipURL = 'https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.3.31/jenkins-operator.v0.3.31.clusterserviceversion.yaml'
    expectedOfferingZipURLUpdate = "https://github.com/operator-framework/community-operators/blob/master/community-operators/jenkins-operator/0.4.0/jenkins-operator.v0.4.0.clusterserviceversion.yaml"
    expectedOfferingVersionUpdate = "0.4.0"
    catalogResponse = self.service.create_catalog(label=expectedLabel, short_description=expectedShortDesc)
    catalogResult = catalogResponse.get_result()
    # Import 0.3.31 first, then 0.4.0 on top of it.
    offeringResponse = self.service.import_offering(catalog_identifier=catalogResult.get('id'), zipurl=expectedOfferingZipURL, x_auth_token=self.gitToken)
    offeringResult = offeringResponse.get_result()
    versionResponse = self.service.import_offering_version(catalog_identifier=catalogResult.get('id'), offering_id=offeringResult.get('id'), zipurl=expectedOfferingZipURLUpdate, x_auth_token=self.gitToken)
    versionResult = versionResponse.get_result()
    # Ask for updates available to the older (0.3.31) version.
    updateResponse = self.service.get_version_updates(version_loc_id=offeringResult.get('kinds')[0].get('versions')[0].get('version_locator'))
    updateResult = updateResponse.get_result()
    # Clean up the catalog before asserting so a failed assertion cannot leak it.
    self.service.delete_catalog(catalog_identifier=catalogResult.get('id'))
    assert updateResponse is not None
    assert updateResponse.get_status_code() == 200
    assert updateResult is not None
    assert updateResult[0].get('version_locator') == versionResult.get('kinds')[0].get('versions')[1].get('version_locator')
    assert updateResult[0].get('version') == expectedOfferingVersionUpdate
    assert updateResult[0].get('package_version') == expectedOfferingVersionUpdate
    assert updateResult[0].get('can_update') is True
def test_get_version_updates_failure(self):
    """get_version_updates with an unknown version locator must fail with 404."""
    with pytest.raises(ApiException) as err:
        self.service.get_version_updates(version_loc_id=fakeVersionLocator)
    assert err.value.code == 404
def test_get_license_providers(self):
    """List license providers and verify the single expected IBM Passport Advantage entry."""
    # NOTE(review): these expectations are tied to the test account's state — confirm
    # the account is provisioned with exactly this one provider before relying on them.
    expectedTotalResults = 1
    expectedTotalPages = 1
    expectedName = "IBM Passport Advantage"
    expectedOfferingType = "content"
    expectedCreateURL = "https://www.ibm.com/software/passportadvantage/aboutpassport.html"
    expectedInfoURL = "https://www.ibm.com/software/passportadvantage/"
    expectedURL = "/v1/licensing/license_providers/11cabc37-c4a7-410b-894d-8cb3586423f1"
    expectedState = "active"
    listResponse = self.service.get_license_providers()
    assert listResponse is not None
    assert listResponse.get_status_code() == 200
    listResult = listResponse.get_result()
    assert listResult.get('total_results') == expectedTotalResults
    assert listResult.get('total_pages') == expectedTotalPages
    resources = listResult.get('resources')
    assert resources is not None
    assert len(resources) == 1
    assert resources[0].get('name') == expectedName
    assert resources[0].get('offering_type') == expectedOfferingType
    assert resources[0].get('create_url') == expectedCreateURL
    assert resources[0].get('info_url') == expectedInfoURL
    assert resources[0].get('url') == expectedURL
    assert resources[0].get('state') == expectedState
def test_list_license_entitlements(self):
    """Listing license entitlements succeeds and returns an empty resource list."""
    expectedTotalResults = 0
    expectedTotalPages = 1
    expectedResourceCount = 0
    response = self.service.list_license_entitlements()
    assert response is not None
    assert response.get_status_code() == 200
    result = response.get_result()
    assert result.get('total_results') == expectedTotalResults
    assert result.get('total_pages') == expectedTotalPages
    entitlements = result.get('resources')
    assert len(entitlements) == expectedResourceCount
def test_search_license_versions(self):
    """Searching license versions is not permitted for this account (403)."""
    with pytest.raises(ApiException) as err:
        self.service.search_license_versions(q=fakeName)
    assert err.value.code == 403
def test_search_license_offerings(self):
    """Searching license offerings is not permitted for this account (403)."""
    with pytest.raises(ApiException) as err:
        self.service.search_license_offerings(q=fakeName)
    assert err.value.code == 403
if __name__ == '__main__':
    # Allow running this integration-test module directly.
    unittest.main()
| 53.319602
| 267
| 0.726909
| 3,995
| 37,537
| 6.683605
| 0.076095
| 0.020598
| 0.033032
| 0.021348
| 0.842515
| 0.794877
| 0.769409
| 0.740572
| 0.721359
| 0.677428
| 0
| 0.011732
| 0.164345
| 37,537
| 703
| 268
| 53.395448
| 0.839486
| 0.024429
| 0
| 0.53861
| 0
| 0.027027
| 0.144009
| 0.006369
| 0
| 0
| 0
| 0
| 0.405405
| 1
| 0.07529
| false
| 0.005792
| 0.040541
| 0
| 0.117761
| 0.001931
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5ee02e35bf05df1675a70d49e70f809fd4629793
| 3,072
|
py
|
Python
|
python/Day13.py
|
Simik31/AOC-2021
|
fc0459cf4f3af8439657969b4e957a35e5d56484
|
[
"WTFPL"
] | null | null | null |
python/Day13.py
|
Simik31/AOC-2021
|
fc0459cf4f3af8439657969b4e957a35e5d56484
|
[
"WTFPL"
] | null | null | null |
python/Day13.py
|
Simik31/AOC-2021
|
fc0459cf4f3af8439657969b4e957a35e5d56484
|
[
"WTFPL"
] | null | null | null |
def part_1() -> None:
    """AoC 2021 day 13 part 1: apply only the first fold and count the visible dots."""
    grid: list[list[bool]] = []
    with open("../data/day13.txt", "r") as src:
        # Dot coordinates come first, terminated by a blank line.
        while (row := src.readline().strip()) != "":
            cx, cy = (int(part) for part in row.split(","))
            while cy >= len(grid):
                grid.append([])
            while cx >= len(grid[cy]):
                grid[cy].append(False)
            grid[cy][cx] = True
        # Pad every row to a common width so column folds line up.
        width: int = max(len(r) for r in grid)
        for r in grid:
            r.extend([False] * (width - len(r)))
        # Only the first fold instruction matters for part 1: "fold along a=n".
        instruction = src.readline().strip().split()[-1]
        axis, pivot_text = instruction.split("=")
        pivot = int(pivot_text)
        if axis == "x":
            # Mirror the right half onto the left, then truncate at the fold line.
            for r in range(len(grid)):
                for c in range(width - pivot - 1):
                    grid[r][c] |= grid[r][width - c - 1]
                grid[r] = grid[r][:pivot]
        elif axis == "y":
            # Mirror the bottom half onto the top, then truncate at the fold line.
            for r in range(len(grid) - pivot - 1):
                for c in range(width):
                    grid[r][c] |= grid[len(grid) - r - 1][c]
            grid = grid[:pivot]
        dots = sum(1 for r in grid for cell in r if cell)
        print("Day: 13 | Part: 1 | Result:", dots)
def part_2() -> None:
    """AoC 2021 day 13 part 2: apply every fold and print the resulting letters."""
    grid: list[list[bool]] = []
    with open("../data/day13.txt", "r") as src:
        # Dot coordinates come first, terminated by a blank line.
        while (row := src.readline().strip()) != "":
            cx, cy = (int(part) for part in row.split(","))
            while cy >= len(grid):
                grid.append([])
            while cx >= len(grid[cy]):
                grid[cy].append(False)
            grid[cy][cx] = True
        # Pad every row to a common width so column folds line up.
        width: int = max(len(r) for r in grid)
        for r in grid:
            r.extend([False] * (width - len(r)))
        # Apply every remaining "fold along a=n" instruction in order.
        for raw in src.readlines():
            instruction = raw.strip().split()[-1]
            axis, pivot_text = instruction.split("=")
            pivot = int(pivot_text)
            # Width shrinks after each x-fold, so recompute it every pass.
            width = max(len(r) for r in grid)
            if axis == "x":
                for r in range(len(grid)):
                    for c in range(width - pivot - 1):
                        grid[r][c] |= grid[r][width - c - 1]
                    grid[r] = grid[r][:pivot]
            elif axis == "y":
                for r in range(len(grid) - pivot - 1):
                    for c in range(width):
                        grid[r][c] |= grid[len(grid) - r - 1][c]
                grid = grid[:pivot]
        print("Day: 13 | Part: 2 | Result:")
        # Render the remaining dots; the '#' cells spell the puzzle answer.
        for r in grid:
            print("".join("#" if cell else " " for cell in r))
if __name__ == "__main__":
    # Run both puzzle parts when executed as a script.
    part_1()
    part_2()
| 32.336842
| 81
| 0.488932
| 381
| 3,072
| 3.84252
| 0.136483
| 0.086066
| 0.067623
| 0.081967
| 0.878415
| 0.878415
| 0.878415
| 0.862022
| 0.825137
| 0.825137
| 0
| 0.012893
| 0.368815
| 3,072
| 94
| 82
| 32.680851
| 0.742135
| 0
| 0
| 0.797101
| 0
| 0
| 0.035807
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028986
| false
| 0
| 0
| 0
| 0.028986
| 0.057971
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f084c0fa221e7d8955628804b2424f2091ebf9f
| 129,704
|
py
|
Python
|
dlkit/handcar/repository/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/handcar/repository/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/handcar/repository/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
# -*- coding: utf-8 -*-
# This module contains all the Manager classes used by the MIT Core Concept
# Catalog (MC3) Handcar based implementation of the OSID Repository Service.
from ...abstract_osid.repository import managers as abc_repository_managers
from ..osid import managers as osid_managers
from .. import profile
from ..primitives import Id, DisplayText, Type
from ..type.objects import TypeList
from ..osid.osid_errors import NotFound, NullArgument, OperationFailed, Unimplemented
class RepositoryProfile(abc_repository_managers.RepositoryProfile, osid_managers.OsidProfile):
"""The repository profile describes interoperability among repository
services."""
def supports_visible_federation(self):
    """Return whether visible federation is supported.

    return: (boolean) - true if visible federation is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_visible_federation' in profile.SUPPORTS
def supports_asset_lookup(self):
    """Return whether asset lookup is supported.

    return: (boolean) - true if asset lookup is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_lookup' in profile.SUPPORTS
def supports_asset_query(self):
    """Return whether asset query is supported.

    return: (boolean) - true if asset query is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_query' in profile.SUPPORTS
def supports_asset_search(self):
    """Return whether asset search is supported.

    return: (boolean) - true if asset search is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_search' in profile.SUPPORTS
def supports_asset_admin(self):
    """Return whether asset administration is supported.

    return: (boolean) - true if asset administration is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_admin' in profile.SUPPORTS
def supports_asset_notification(self):
    """Return whether asset notification is supported.

    A repository may send messages when assets are created, modified,
    or deleted.

    return: (boolean) - true if asset notification is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_notification' in profile.SUPPORTS
def supports_asset_repository(self):
    """Return whether retrieving asset-to-repository mappings is supported.

    return: (boolean) - true if asset repository mapping retrieval is
        supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_repository' in profile.SUPPORTS
def supports_asset_repository_assignment(self):
    """Return whether managing asset-to-repository mappings is supported.

    return: (boolean) - true if asset repository assignment is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_repository_assignment' in profile.SUPPORTS
def supports_asset_smart_repository(self):
    """Return whether asset smart repository is supported.

    return: (boolean) - true if asset smart repository is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_smart_repository' in profile.SUPPORTS
def supports_asset_temporal(self):
    """Return whether retrieving asset time-coverage mappings is supported.

    return: (boolean) - true if asset temporal mapping retrieval is
        supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_temporal' in profile.SUPPORTS
def supports_asset_temporal_assignment(self):
    """Return whether managing asset time-coverage mappings is supported.

    return: (boolean) - true if asset temporal assignment is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_temporal_assignment' in profile.SUPPORTS
def supports_asset_spatial(self):
    """Return whether retrieving asset spatial-coverage mappings is supported.

    return: (boolean) - true if asset spatial mapping retrieval is
        supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_spatial' in profile.SUPPORTS
def supports_asset_spatial_assignment(self):
    """Return whether managing asset spatial-coverage mappings is supported.

    return: (boolean) - true if asset spatial assignment is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_spatial_assignment' in profile.SUPPORTS
def supports_asset_composition(self):
    """Return whether assets may be included in compositions.

    return: (boolean) - true if asset composition is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_composition' in profile.SUPPORTS
def supports_asset_composition_design(self):
    """Return whether mapping assets to compositions is supported.

    return: (boolean) - true if designing asset compositions is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_asset_composition_design' in profile.SUPPORTS
def supports_composition_lookup(self):
    """Return whether composition lookup is supported.

    return: (boolean) - true if composition lookup is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_composition_lookup' in profile.SUPPORTS
def supports_composition_query(self):
    """Return whether composition query is supported.

    return: (boolean) - true if composition query is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_composition_query' in profile.SUPPORTS
def supports_composition_search(self):
    """Return whether composition search is supported.

    return: (boolean) - true if composition search is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_composition_search' in profile.SUPPORTS
def supports_composition_admin(self):
    """Return whether composition administration is supported.

    return: (boolean) - true if composition administration is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_composition_admin' in profile.SUPPORTS
def supports_composition_notification(self):
    """Return whether composition notification is supported.

    return: (boolean) - true if composition notification is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_composition_notification' in profile.SUPPORTS
def supports_composition_repository(self):
    """Return whether retrieving composition-to-repository mappings is supported.

    return: (boolean) - true if composition to repository mapping is
        supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_composition_repository' in profile.SUPPORTS
def supports_composition_repository_assignment(self):
    """Return whether assigning composition-to-repository mappings is supported.

    return: (boolean) - true if composition to repository assignment is
        supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_composition_repository_assignment' in profile.SUPPORTS
def supports_composition_smart_repository(self):
    """Return whether composition smart repository is supported.

    return: (boolean) - true if composition smart repository is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_composition_smart_repository' in profile.SUPPORTS
def supports_repository_lookup(self):
    """Return whether repository lookup is supported.

    return: (boolean) - true if repository lookup is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_repository_lookup' in profile.SUPPORTS
def supports_repository_query(self):
    """Return whether repository query is supported.

    return: (boolean) - true if repository query is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_repository_query' in profile.SUPPORTS
def supports_repository_search(self):
    """Return whether repository search is supported.

    return: (boolean) - true if repository search is supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_repository_search' in profile.SUPPORTS
def supports_repository_admin(self):
    """Return whether repository administration is supported.

    return: (boolean) - true if repository administration is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_repository_admin' in profile.SUPPORTS
def supports_repository_notification(self):
    """Return whether repository notification is supported.

    Messages may be sent when Repository objects are created, deleted or
    updated. Notifications for assets within repositories are sent via the
    asset notification session.

    return: (boolean) - true if repository notification is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_repository_notification' in profile.SUPPORTS
def supports_repository_hierarchy(self):
    """Return whether repository hierarchy traversal is supported.

    return: (boolean) - true if a repository hierarchy traversal is
        supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_repository_hierarchy' in profile.SUPPORTS
def supports_repository_hierarchy_design(self):
    """Return whether repository hierarchy design is supported.

    return: (boolean) - true if a repository hierarchy design is
        supported, false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_repository_hierarchy_design' in profile.SUPPORTS
def supports_repository_batch(self):
    """Return whether a repository batch service is supported.

    return: (boolean) - true if a repository batch service is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_repository_batch' in profile.SUPPORTS
def supports_repository_rules(self):
    """Return whether a repository rules service is supported.

    return: (boolean) - true if a repository rules service is supported,
        false otherwise
    compliance: mandatory - This method must be implemented.
    """
    return 'supports_repository_rules' in profile.SUPPORTS
def get_asset_record_types(self):
    """Return all supported asset record types.

    return: (osid.type.TypeList) - the list of supported asset record types
    compliance: mandatory - This method must be implemented.
    """
    # This implementation supports no extension record types.
    return TypeList([])
def supports_asset_record_type(self, asset_record_type=None):
    """Return whether the given asset record type is supported.

    arg:    asset_record_type (osid.type.Type): the asset record type
    return: (boolean) - true if the asset record type is supported,
        false otherwise
    raise:  NullArgument - asset_record_type is null
    compliance: mandatory - This method must be implemented.
    """
    # No asset record types are supported by this implementation.
    return False
def get_asset_search_record_types(self):
    """Return all supported asset search record types.

    return: (osid.type.TypeList) - the list of supported asset search
        record types
    compliance: mandatory - This method must be implemented.
    """
    # This implementation supports no extension record types.
    return TypeList([])
def supports_asset_search_record_type(self, asset_search_record_type=None):
    """Return whether the given asset search record type is supported.

    arg:    asset_search_record_type (osid.type.Type): the asset search
        record type
    return: (boolean) - true if the asset search record type is supported,
        false otherwise
    raise:  NullArgument - asset_search_record_type is null
    compliance: mandatory - This method must be implemented.
    """
    # No asset search record types are supported by this implementation.
    return False
def get_asset_content_record_types(self):
    """Return all supported asset content record types.

    return: (osid.type.TypeList) - the list of supported asset content
        record types
    compliance: mandatory - This method must be implemented.
    """
    # This implementation supports no extension record types.
    return TypeList([])
def supports_asset_content_record_type(self, asset_content_record_type=None):
    """Return whether the given asset content record type is supported.

    arg:    asset_content_record_type (osid.type.Type): the asset content
        record type
    return: (boolean) - true if the asset content record type is
        supported, false otherwise
    raise:  NullArgument - asset_content_record_type is null
    compliance: mandatory - This method must be implemented.
    """
    # No asset content record types are supported by this implementation.
    return False
def get_composition_record_types(self):
    """Return all supported composition record types.

    return: (osid.type.TypeList) - the list of supported composition
        record types
    compliance: mandatory - This method must be implemented.
    """
    # This implementation supports no extension record types.
    return TypeList([])
def supports_composition_record_type(self, composition_record_type=None):
    """Return whether the given composition record type is supported.

    arg:    composition_record_type (osid.type.Type): the composition
        record type
    return: (boolean) - true if the composition record type is supported,
        false otherwise
    raise:  NullArgument - composition_record_type is null
    compliance: mandatory - This method must be implemented.
    """
    # No composition record types are supported by this implementation.
    return False
def get_composition_search_record_types(self):
"""Gets all the composition search record types supported.
return: (osid.type.TypeList) - the list of supported composition
search record types
compliance: mandatory - This method must be implemented.
"""
return TypeList([])
def supports_composition_search_record_type(self, composition_search_record_type=None):
"""Tests if a given composition search record type is supported.
arg: composition_search_record_type (osid.type.Type): the
composition serach type
return: (boolean) - true if the composition search record type
is supported , false otherwise
raise: NullArgument - composition_search_record_type is null
compliance: mandatory - This method must be implemented.
"""
return False
def get_repository_record_types(self):
"""Gets all the repository record types supported.
return: (osid.type.TypeList) - the list of supported repository
record types
compliance: mandatory - This method must be implemented.
"""
return TypeList([])
def supports_repository_record_type(self, repository_record_type=None):
"""Tests if a given repository record type is supported.
arg: repository_record_type (osid.type.Type): the repository
record type
return: (boolean) - true if the repository record type is
supported , false otherwise
raise: NullArgument - repository_record_type is null
compliance: mandatory - This method must be implemented.
"""
return False
def get_repository_search_record_types(self):
"""Gets all the repository search record types supported.
return: (osid.type.TypeList) - the list of supported repository
search record types
compliance: mandatory - This method must be implemented.
"""
return TypeList([])
def supports_repository_search_record_type(self, repository_search_record_type=None):
"""Tests if a given repository search record type is supported.
arg: repository_search_record_type (osid.type.Type): the
repository search type
return: (boolean) - true if the repository search record type is
supported , false otherwise
raise: NullArgument - repository_search_record_type is null
compliance: mandatory - This method must be implemented.
"""
return False
def get_spatial_unit_record_types(self):
"""Gets all the spatial unit record types supported.
return: (osid.type.TypeList) - the list of supported spatial
unit record types
compliance: mandatory - This method must be implemented.
"""
return TypeList([])
def supports_spatial_unit_record_type(self, spatial_unit_record_type=None):
"""Tests if a given spatial unit record type is supported.
arg: spatial_unit_record_type (osid.type.Type): the spatial
unit record type
return: (boolean) - true if the spatial unit record type is
supported , false otherwise
raise: NullArgument - spatial_unit_record_type is null
compliance: mandatory - This method must be implemented.
"""
return False
def get_coordinate_types(self):
"""Gets all the coordinate types supported.
return: (osid.type.TypeList) - the list of supported coordinate
types
compliance: mandatory - This method must be implemented.
"""
return TypeList([])
def supports_coordinate_type(self, coordinate_type=None):
"""Tests if a given coordinate type is supported.
arg: coordinate_type (osid.type.Type): the coordinate type
return: (boolean) - true if the coordinate type is supported ,
false otherwise
raise: NullArgument - coordinate_type is null
compliance: mandatory - This method must be implemented.
"""
return False
class RepositoryManager(abc_repository_managers.RepositoryManager, osid_managers.OsidManager, RepositoryProfile):
"""The repository manager provides access to asset lookup and creation
session and provides interoperability tests for various aspects of
this service.
The sessions included in this manager are:
> AssetLookupSession: a session to retrieve assets
> AssetQuerySession: a session to query assets
> AssetSearchSession: a session to search for assets
> AssetAdminSession: a session to create and delete assets
> AssetNotificationSession: a session to receive notifications
pertaining to asset changes
> AssetRepositorySession: a session to look up asset to repository
mappings
> AssetRepositoryAssignmentSession: a session to manage asset to
repository mappings
> AssetSmartRepositorySession: a session to manage dynamic
repositories of assets
> AssetTemporalSession: a session to access the temporal coverage
of an asset
> AssetTemporalAssignmentSession: a session to manage the temporal
coverage of an asset
> AssetSpatialSession: a session to access the spatial coverage of
an asset
> AssetSpatialAssignmentSession: a session to manage the spatial
coverage of an asset
> AssetCompositionSession: a session to look up asset composition
mappings
> AssetCompositionDesignSession: a session to map assets to
compositions
> CompositionLookupSession: a session to retrieve compositions
> CompositionQuerySession: a session to query compositions
> CompositionSearchSession: a session to search for compositions
> CompositionAdminSession: a session to create, update and delete
compositions
> CompositionNotificationSession: a session to receive
notifications pertaining to changes in compositions
> CompositionRepositorySession: a session to retrieve composition
repository mappings
> CompositionRepositoryAssignmentSession: a session to manage
composition repository mappings
> CompositionSmartRepositorySession: a session to manage dynamic
repositories of compositions
> RepositoryLookupSession: a session to retrieve repositories
> RepositoryQuerySession: a session to query repositories
> RepositorySearchSession: a session to search for repositories
> RepositoryAdminSession: a session to create, update and delete
repositories
> RepositoryNotificationSession: a session to receive
notifications pertaining to changes in repositories
> RepositoryHierarchySession: a session to traverse repository
hierarchies
> RepositoryHierarchyDesignSession: a session to manage repository
hierarchies
"""
def get_asset_lookup_session(self, *args, **kwargs):
"""Gets the OsidSession associated with the asset lookup service.
return: (osid.repository.AssetLookupSession) - the new
AssetLookupSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_lookup() is false
compliance: optional - This method must be implemented if
supports_asset_lookup() is true.
"""
if not self.supports_asset_lookup():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetLookupSession(proxy=self._proxy,
runtime=self._runtime, **kwargs)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_lookup_session_for_repository(self, repository_id=None, *args, **kwargs):
"""Gets the OsidSession associated with the asset lookup service
for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.AssetLookupSession) - the new
AssetLookupSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_lookup() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_lookup() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_lookup():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetLookupSession(repository_id,
proxy=self._proxy,
runtime=self._runtime, **kwargs)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_query_session(self):
"""Gets an asset query session.
return: (osid.repository.AssetQuerySession) - an
AssetQuerySession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_query() is false
compliance: optional - This method must be implemented if
supports_asset_query() is true.
"""
if not self.supports_asset_query():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetQuerySession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_query_session_for_repository(self, repository_id=None):
"""Gets an asset query session for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.AssetQuerySession) - an
AssetQuerySession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_query() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_query() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_query():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetQuerySession(repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_search_session(self):
"""Gets an asset search session.
return: (osid.repository.AssetSearchSession) - an
AssetSearchSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_search() is false
compliance: optional - This method must be implemented if
supports_asset_search() is true.
"""
if not self.supports_asset_search():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetSearchSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_search_session_for_repository(self, repository_id=None):
"""Gets an asset search session for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.AssetSearchSession) - an
AssetSearchSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_search() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_search() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_search():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetSearchSession(repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_admin_session(self, *args, **kwargs):
"""Gets an asset administration session for creating, updating and
deleting assets.
return: (osid.repository.AssetAdminSession) - an
AssetAdminSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_admin() is false
compliance: optional - This method must be implemented if
supports_asset_admin() is true.
"""
if not self.supports_asset_admin():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetAdminSession(proxy=self._proxy,
runtime=self._runtime, **kwargs)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_admin_session_for_repository(self, repository_id=None, *args, **kwargs):
"""Gets an asset administration session for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.AssetAdminSession) - an
AssetAdminSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_admin() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_admin() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_admin():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetAdminSession(repository_id,
proxy=self._proxy,
runtime=self._runtime, **kwargs)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_notification_session(self, asset_receiver=None):
"""Gets the notification session for notifications pertaining to
asset changes.
arg: asset_receiver (osid.repository.AssetReceiver): the
notification callback
return: (osid.repository.AssetNotificationSession) - an
AssetNotificationSession
raise: NullArgument - asset_receiver is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_notification() is false
compliance: optional - This method must be implemented if
supports_asset_notification() is true.
"""
if asset_receiver is None:
raise NullArgument()
if not self.supports_asset_notification():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetNotificationSession(asset_receiver,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_notification_session_for_repository(self, asset_receiver=None, repository_id=None):
"""Gets the asset notification session for the given repository.
arg: asset_receiver (osid.repository.AssetReceiver): the
notification callback
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.AssetNotificationSession) - an
AssetNotificationSession
raise: NotFound - repository_id not found
raise: NullArgument - asset_receiver or repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_notification() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_notfication() and
supports_visible_federation() are true.
"""
if not repository_id or not asset_receiver:
raise NullArgument()
if not self.supports_asset_lookup():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetAdminSession(asset_receiver,
repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_repository_session(self):
"""Gets the session for retrieving asset to repository mappings.
return: (osid.repository.AssetRepositorySession) - an
AssetRepositorySession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_repository() is false
compliance: optional - This method must be implemented if
supports_asset_repository() is true.
"""
if not self.supports_asset_repository():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetRespositorySession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_repository_assignment_session(self):
"""Gets the session for assigning asset to repository mappings.
return: (osid.repository.AssetRepositoryAssignmentSession) - an
AssetRepositoryAsignmentSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_repository_assignment()
is false
compliance: optional - This method must be implemented if
supports_asset_repository_assignment() is true.
"""
if not self.supports_asset_repository_assignment():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetRespositoryAssignmentSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_smart_repository_session(self, repository_id=None):
"""Gets an asset smart repository session for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.AssetSmartRepositorySession) - an
AssetSmartRepositorySession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_smart_repository() false
compliance: optional - This method must be implemented if
supports_asset_smart_repository() is true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_smart_repository():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetSmartRepositorySession(repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_temporal_session(self):
"""Gets the session for retrieving temporal coverage of an asset.
return: (osid.repository.AssetTemporalSession) - an
AssetTemporalSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_temporal() is false
compliance: optional - This method must be implemented if
supports_asset_temporal() is true.
"""
if not self.supports_asset_temporal():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetTemporalSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_temporal_session_for_repository(self, repository_id=None):
"""Gets the session for retrieving temporal coverage of an asset
for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.AssetTemporalSession) - an
AssetTemporalSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_temporal() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_temporal() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_temporal():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetTemporalSession(repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_temporal_assignment_session(self):
"""Gets the session for assigning temporal coverage to an asset.
return: (osid.repository.AssetTemporalAssignmentSession) - an
AssetTemporalAssignmentSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_temporal_assignment() is
false
compliance: optional - This method must be implemented if
supports_asset_temporal_assignment() is true.
"""
if not self.supports_asset_temporal_assignment():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetTemporalAssignmentSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_temporal_assignment_session_for_repository(self, repository_id=None):
"""Gets the session for assigning temporal coverage of an asset for
the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.AssetTemporalAssignmentSession) - an
AssetTemporalAssignmentSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_temporal_assignment() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_temporal_assignment() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_temporal_assignment():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetTemporalAssignmentSession(repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_spatial_session(self):
"""Gets the session for retrieving spatial coverage of an asset.
return: (osid.repository.AssetSpatialSession) - an
AssetSpatialSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_spatial_assets() is false
compliance: optional - This method must be implemented if
supports_spatial_assets() is true.
"""
if not self.supports_spatial_asset():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetSpatialSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_spatial_session_for_repository(self, repository_id=None):
"""Gets the session for retrieving spatial coverage of an asset for
the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.AssetSpatialSession) - an
AssetSpatialSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_spatial() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_spatial() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_spatial() or not self.supports_visible_federation():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetSpatialSession(repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_spatial_assignment_session(self):
"""Gets the session for assigning spatial coverage to an asset.
return: (osid.repository.AssetSpatialAssignmentSession) - an
AssetSpatialAssignmentSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_spatial_assignment() is
false
compliance: optional - This method must be implemented if
supports_asset_spatial_assignment() is true.
"""
if not self.supports_asset_spatial_assignment():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetSpatialAssignmentSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_spatial_assignment_session_for_repository(self, repository_id=None):
"""Gets the session for assigning spatial coverage of an asset for
the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.AssetSpatialAssignmentSession) - an
AssetSpatialAssignmentSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_spatial_assignment() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_spatial_assignment() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_spatial_assignment() or not self.supports_visible_federation():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetSpatialAssignmentSession(repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_composition_session(self):
"""Gets the session for retrieving asset compositions.
return: (osid.repository.AssetCompositionSession) - an
AssetCompositionSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_composition() is false
compliance: optional - This method must be implemented if
supports_asset_composition() is true.
"""
if not self.supports_asset_composition():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.AssetCompositionSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_composition_design_session(self):
"""Gets the session for creating asset compositions.
return: (osid.repository.AssetCompositionDesignSession) - an
AssetCompositionDesignSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_composition_design() is
false
compliance: optional - This method must be implemented if
supports_asset_composition_design() is true.
"""
if not self.supports_asset_composition_design():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
try:
session = sessions.AssetCompositionDesignSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_composition_lookup_session(self):
"""Gets the OsidSession associated with the composition lookup
service.
return: (osid.repository.CompositionLookupSession) - the new
CompositionLookupSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_lookup() is false
compliance: optional - This method must be implemented if
supports_composition_lookup() is true.
"""
if not self.supports_composition_lookup():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.CompositionLookupSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_lookup_session_for_repository(self, repository_id=None):
"""Gets the OsidSession associated with the composition lookup
service for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.CompositionLookupSession) - the new
CompositionLookupSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_lookup() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_composition_lookup() and
supports_visible_federation() are true.
"""
if repository_id is None:
raise NullArgument()
if not self.supports_composition_lookup():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.CompositionLookupSession(repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_query_session(self):
"""Gets a composition query session.
return: (osid.repository.CompositionQuerySession) - a
CompositionQuerySession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_query() is false
compliance: optional - This method must be implemented if
supports_composition_query() is true.
"""
if not self.supports_composition_query():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.CompositionQuerySession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_query_session_for_repository(self, repository_id=None):
"""Gets a composition query session for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.CompositionQuerySession) - a
CompositionQuerySession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_query() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_composition_query() and
supports_visible_federation() are true.
"""
if repository_id is None:
raise NullArgument()
if not self.supports_composition_query():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.CompositionQuerySession(repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_search_session(self):
"""Gets a composition search session.
return: (osid.repository.CompositionSearchSession) - a
CompositionSearchSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_search() is false
compliance: optional - This method must be implemented if
supports_composition_search() is true.
"""
if not self.supports_composition_search():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.CompositionSearchSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_search_session_for_repository(self, repository_id=None):
"""Gets a composition search session for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
return: (osid.repository.CompositionSearchSession) - a
CompositionSearchSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_search() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_composition_search() and
supports_visible_federation() are true.
"""
if repository_id is None:
raise NullArgument()
if not self.supports_composition_search() or not self.supports_visible_federation():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.CompositionSearchSession(repository_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_admin_session(self):
"""Gets a composition administration session for creating, updating
and deleting compositions.
return: (osid.repository.CompositionAdminSession) - a
CompositionAdminSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_admin() is false
compliance: optional - This method must be implemented if
supports_composition_admin() is true.
"""
if not self.supports_composition_admin():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
try:
session = sessions.CompositionAdminSession(proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_admin_session_for_repository(self, repository_id=None):
    """Gets a composition administrative session for the given
    repository.
    arg:    repository_id (osid.id.Id): the Id of the repository
    return: (osid.repository.CompositionAdminSession) - a
            CompositionAdminSession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_composition_admin() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
            supports_composition_admin() and
            supports_visible_federation() are true.
    """
    if repository_id is None:
        raise NullArgument()
    if not self.supports_composition_admin() or not self.supports_visible_federation():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise  # OperationFailed()
    try:
        # Bug fix: previously constructed CompositionSearchSession, which
        # does not satisfy this method's CompositionAdminSession contract.
        session = sessions.CompositionAdminSession(repository_id,
                                                   proxy=self._proxy,
                                                   runtime=self._runtime)
    except AttributeError:
        raise  # OperationFailed()
    return session
def get_composition_notification_session(self, composition_receiver=None):
    """Return the notification session for composition change
    notifications.

    arg:    composition_receiver
            (osid.repository.CompositionReceiver): the notification
            callback
    return: (osid.repository.CompositionNotificationSession) - a
            CompositionNotificationSession
    raise:  NullArgument - composition_receiver is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_composition_notification() is
            false
    compliance: optional - This method must be implemented if
            supports_composition_notification() is true.
    """
    if composition_receiver is None:
        raise NullArgument()
    if not self.supports_composition_notification():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        return sessions.CompositionNotificationSession(
            composition_receiver,
            proxy=self._proxy,
            runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_composition_notification_session_for_repository(self, composition_receiver=None, repository_id=None):
    """Gets the composition notification session for the given
    repository.
    arg:    composition_receiver
            (osid.repository.CompositionReceiver): the notification
            callback
    arg:    repository_id (osid.id.Id): the Id of the repository
    return: (osid.repository.CompositionNotificationSession) - a
            CompositionNotificationSession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - composition_receiver or repository_id is
            null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_composition_notification() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
            supports_composition_notfication() and
            supports_visible_federation() are true.
    """
    if composition_receiver is None or repository_id is None:
        raise NullArgument()
    # Per the documented contract (and the sibling *_for_repository
    # methods above), visible federation must also be supported.
    if not self.supports_composition_notification() or not self.supports_visible_federation():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise  # OperationFailed()
    try:
        session = sessions.CompositionNotificationSession(composition_receiver,
                                                          repository_id,
                                                          proxy=self._proxy,
                                                          runtime=self._runtime)
    except AttributeError:
        raise  # OperationFailed()
    return session
def get_composition_repository_session(self):
    """Return the session used to look up composition to repository
    mappings.

    return: (osid.repository.CompositionRepositorySession) - a
            CompositionRepositorySession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_composition_repository() is
            false
    compliance: optional - This method must be implemented if
            supports_composition_repository() is true.
    """
    if not self.supports_composition_repository():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        return sessions.CompositionRepositorySession(proxy=self._proxy,
                                                     runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_composition_repository_assignment_session(self):
    """Return the session used to assign composition to repository
    mappings.

    return: (osid.repository.CompositionRepositoryAssignmentSession)
            - a CompositionRepositoryAssignmentSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            supports_composition_repository_assignment() is false
    compliance: optional - This method must be implemented if
            supports_composition_repository_assignment() is true.
    """
    if not self.supports_composition_repository_assignment():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        return sessions.CompositionRepositoryAssignmentSession(
            proxy=self._proxy,
            runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_composition_smart_repository_session(self, repository_id=None):
    """Return a composition smart repository session for the given
    repository.

    arg:    repository_id (osid.id.Id): the Id of the repository
    return: (osid.repository.CompositionSmartRepositorySession) - a
            CompositionSmartRepositorySession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_composition_smart_repository()
            false
    compliance: optional - This method must be implemented if
            supports_composition_smart_repository() is true.
    """
    if repository_id is None:
        raise NullArgument()
    if not self.supports_composition_smart_repository():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        return sessions.CompositionSmartRepositorySession(
            repository_id,
            proxy=self._proxy,
            runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_repository_lookup_session(self, *args, **kwargs):
    """Return the repository lookup session.

    return: (osid.repository.RepositoryLookupSession) - a
            RepositoryLookupSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_repository_lookup() is false
    compliance: optional - This method must be implemented if
            supports_repository_lookup() is true.
    """
    if not self.supports_repository_lookup():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        # Positional args are accepted for interface compatibility but
        # are not forwarded; only keyword arguments reach the session.
        return sessions.RepositoryLookupSession(proxy=self._proxy,
                                                runtime=self._runtime,
                                                **kwargs)
    except AttributeError:  # OperationFailed()
        raise
def get_repository_query_session(self):
    """Return the repository query session.

    return: (osid.repository.RepositoryQuerySession) - a
            RepositoryQuerySession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_repository_query() is false
    compliance: optional - This method must be implemented if
            supports_repository_query() is true.
    """
    if not self.supports_repository_query():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        return sessions.RepositoryQuerySession(proxy=self._proxy,
                                               runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_repository_search_session(self):
    """Return the repository search session.

    return: (osid.repository.RepositorySearchSession) - a
            RepositorySearchSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_repository_search() is false
    compliance: optional - This method must be implemented if
            supports_repository_search() is true.
    """
    if not self.supports_repository_search():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        return sessions.RepositorySearchSession(proxy=self._proxy,
                                                runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_repository_admin_session(self):
    """Return the repository administrative session.

    The session supports creating, updating and deleting
    repositories.
    return: (osid.repository.RepositoryAdminSession) - a
            RepositoryAdminSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_repository_admin() is false
    compliance: optional - This method must be implemented if
            supports_repository_admin() is true.
    """
    if not self.supports_repository_admin():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        return sessions.RepositoryAdminSession(proxy=self._proxy,
                                               runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_repository_notification_session(self, repository_receiver=None):
    """Return the notification session for subscribing to repository
    changes.

    arg:    repository_receiver
            (osid.repository.RepositoryReceiver): the notification
            callback
    return: (osid.repository.RepositoryNotificationSession) - a
            RepositoryNotificationSession
    raise:  NullArgument - repository_receiver is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_repository_notification() is
            false
    compliance: optional - This method must be implemented if
            supports_repository_notification() is true.
    """
    if repository_receiver is None:
        raise NullArgument()
    if not self.supports_repository_notification():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        return sessions.RepositoryNotificationSession(
            repository_receiver,
            proxy=self._proxy,
            runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_repository_hierarchy_session(self):
    """Return the repository hierarchy traversal session.

    return: (osid.repository.RepositoryHierarchySession) - a
            RepositoryHierarchySession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_repository_hierarchy() is false
    compliance: optional - This method must be implemented if
            supports_repository_hierarchy() is true.
    """
    if not self.supports_repository_hierarchy():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        return sessions.RepositoryHierarchySession(proxy=self._proxy,
                                                   runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_repository_hierarchy_design_session(self):
    """Return the repository hierarchy design session.

    return: (osid.repository.RepositoryHierarchyDesignSession) - a
            RepostoryHierarchyDesignSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_repository_hierarchy_design()
            is false
    compliance: optional - This method must be implemented if
            supports_repository_hierarchy_design() is true.
    """
    if not self.supports_repository_hierarchy_design():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    try:
        return sessions.RepositoryHierarchyDesignSession(
            proxy=self._proxy,
            runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_repository_batch_manager(self):
    """Return a RepositoryBatchManager.

    Not supported by this implementation; always raises.
    return: (osid.repository.batch.RepositoryBatchManager) - a
            RepostoryBatchManager
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_repository_batch() is false
    compliance: optional - This method must be implemented if
            supports_repository_batch() is true.
    """
    raise Unimplemented()
def get_repository_rules_manager(self):
    """Return a RepositoryRulesManager.

    Not supported by this implementation; always raises.
    return: (osid.repository.rules.RepositoryRulesManager) - a
            RepostoryRulesManager
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_repository_rules() is false
    compliance: optional - This method must be implemented if
            supports_repository_rules() is true.
    """
    raise Unimplemented()
class RepositoryProxyManager(abc_repository_managers.RepositoryProxyManager,
osid_managers.OsidProxyManager,
RepositoryProfile):
"""The repository manager provides access to asset lookup and creation
session and provides interoperability tests for various aspects of
this service.
Methods in this manager support the passing of a ``Proxy``. The
sessions included in this manager are:
> AssetLookupSession: a session to retrieve assets
> AssetQuerySession: a session to query assets
> AssetSearchSession: a session to search for assets
> AssetAdminSession: a session to create and delete assets
> AssetNotificationSession: a session to receive notifications
pertaining to asset changes
> AssetRepositorySession: a session to look up asset to repository
mappings
> AssetRepositoryAssignmentSession: a session to manage asset to
repository mappings
> AssetSmartRepositorySession: a session to manage dynamic
repositories of assets
> AssetTemporalSession: a session to access the temporal coverage
of an asset
> AssetTemporalAssignmentSession: a session to manage the temporal
coverage of an asset
> AssetSpatialSession: a session to access the spatial coverage of
an asset
> AssetSpatialAssignmentSession: a session to manage the spatial
coverage of an asset
> AssetCompositionSession: a session to look up asset composition
mappings
> AssetCompositionDesignSession: a session to map assets to
compositions
> CompositionLookupSession: a session to retrieve compositions
> CompositionQuerySession: a session to query compositions
> CompositionSearchSession: a session to search for compositions
> CompositionAdminSession: a session to create, update and delete
compositions
> CompositionNotificationSession: a session to receive
notifications pertaining to changes in compositions
> CompositionRepositorySession: a session to retrieve composition
repository mappings
> CompositionRepositoryAssignmentSession: a session to manage
composition repository mappings
> CompositionSmartRepositorySession: a session to manage dynamic
repositories of compositions
> RepositoryLookupSession: a session to retrieve repositories
> RepositoryQuerySession: a session to query repositories
> RepositorySearchSession: a session to search for repositories
> RepositoryAdminSession: a session to create, update and delete
repositories
> RepositoryNotificationSession: a session to receive
notifications pertaining to changes in repositories
> RepositoryHierarchySession: a session to traverse repository
hierarchies
> RepositoryHierarchyDesignSession: a session to manage repository
hierarchies
"""
def get_asset_lookup_session(self, proxy, *args, **kwargs):
    """Return the OsidSession for the asset lookup service.

    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetLookupSession) - the new
            AssetLookupSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_lookup() is false
    compliance: optional - This method must be implemented if
            supports_asset_lookup() is true.
    """
    if not self.supports_asset_lookup():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetLookupSession(proxy=proxy,
                                           runtime=self._runtime,
                                           **kwargs)
    except AttributeError:  # OperationFailed()
        raise
def get_asset_lookup_session_for_repository(self, repository_id, proxy, *args, **kwargs):
    """Return the asset lookup OsidSession for the given repository.

    arg:    repository_id (osid.id.Id): the Id of the repository
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetLookupSession) - the new
            AssetLookupSession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_lookup() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
            supports_asset_lookup() and
            supports_visible_federation() are true.
    """
    if not repository_id:
        raise NullArgument()
    if not self.supports_asset_lookup():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed('import error')
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetLookupSession(repository_id,
                                           proxy,
                                           runtime=self._runtime,
                                           **kwargs)
    except AttributeError:
        raise OperationFailed('attribute error')
def get_asset_query_session(self, proxy):
    """Return an asset query session.

    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetQuerySession) - an
            AssetQuerySession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_query() is false
    compliance: optional - This method must be implemented if
            supports_asset_query() is true.
    """
    if not self.supports_asset_query():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetQuerySession(proxy=proxy,
                                          runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_asset_query_session_for_repository(self, repository_id, proxy):
    """Return an asset query session for the given repository.

    arg:    repository_id (osid.id.Id): the Id of the repository
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetQuerySession) - an
            AssetQuerySession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_query() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
            supports_asset_query() and
            supports_visible_federation() are true.
    """
    if not repository_id:
        raise NullArgument()
    if not self.supports_asset_query():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed('import error')
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetQuerySession(repository_id,
                                          proxy,
                                          runtime=self._runtime)
    except AttributeError:
        raise OperationFailed('attribute error')
def get_asset_search_session(self, proxy):
    """Return an asset search session.

    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetSearchSession) - an
            AssetSearchSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_search() is false
    compliance: optional - This method must be implemented if
            supports_asset_search() is true.
    """
    if not self.supports_asset_search():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetSearchSession(proxy,
                                           runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_asset_search_session_for_repository(self, repository_id, proxy):
    """Return an asset search session for the given repository.

    arg:    repository_id (osid.id.Id): the Id of the repository
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetSearchSession) - an
            AssetSearchSession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_search() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
            supports_asset_search() and
            supports_visible_federation() are true.
    """
    if not repository_id:
        raise NullArgument()
    if not self.supports_asset_search():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed('import error')
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetSearchSession(repository_id,
                                           proxy,
                                           runtime=self._runtime)
    except AttributeError:
        raise OperationFailed('attribute error')
def get_asset_admin_session(self, proxy, *args, **kwargs):
    """Return an asset administration session.

    The session supports creating, updating and deleting assets.
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetAdminSession) - an
            AssetAdminSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_admin() is false
    compliance: optional - This method must be implemented if
            supports_asset_admin() is true.
    """
    if not self.supports_asset_admin():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetAdminSession(proxy,
                                          runtime=self._runtime,
                                          **kwargs)
    except AttributeError:  # OperationFailed()
        raise
def get_asset_admin_session_for_repository(self, repository_id, proxy, *args, **kwargs):
    """Return an asset administration session for the given
    repository.

    arg:    repository_id (osid.id.Id): the Id of the repository
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetAdminSession) - an
            AssetAdminSession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_admin() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
            supports_asset_admin() and
            supports_visible_federation() are true.
    """
    if not repository_id:
        raise NullArgument()
    if not self.supports_asset_admin():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed('import error')
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetAdminSession(repository_id,
                                          proxy,
                                          runtime=self._runtime,
                                          **kwargs)
    except AttributeError:
        raise OperationFailed('attribute error')
def get_asset_notification_session(self, asset_receiver, proxy):
    """Return the notification session for asset change
    notifications.

    arg:    asset_receiver (osid.repository.AssetReceiver): the
            notification callback
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetNotificationSession) - an
            AssetNotificationSession
    raise:  NullArgument - asset_receiver is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_notification() is false
    compliance: optional - This method must be implemented if
            supports_asset_notification() is true.
    """
    if asset_receiver is None:
        raise NullArgument()
    if not self.supports_asset_notification():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetNotificationSession(asset_receiver,
                                                 proxy,
                                                 runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_asset_notification_session_for_repository(self, asset_receiver, repository_id, proxy):
    """Gets the asset notification session for the given repository.
    arg:    asset_receiver (osid.repository.AssetReceiver): the
            notification callback
    arg:    repository_id (osid.id.Id): the Id of the repository
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetNotificationSession) - an
            AssetNotificationSession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - asset_receiver or repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_notification() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
            supports_asset_notfication() and
            supports_visible_federation() are true.
    """
    if not repository_id or not asset_receiver:
        raise NullArgument()
    # Bug fix: previously gated on supports_asset_lookup(), which is the
    # wrong capability for a notification session.
    if not self.supports_asset_notification():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed('import error')
    proxy = self._convert_proxy(proxy)
    try:
        # Bug fix: previously constructed AssetAdminSession; this method's
        # contract is AssetNotificationSession.
        session = sessions.AssetNotificationSession(asset_receiver, repository_id, proxy, runtime=self._runtime)
    except AttributeError:
        raise OperationFailed('attribute error')
    return session
def get_asset_repository_session(self, proxy):
    """Gets the session for retrieving asset to repository mappings.
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetRepositorySession) - an
            AssetRepositorySession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_repository() is false
    compliance: optional - This method must be implemented if
            supports_asset_repository() is true.
    """
    if not self.supports_asset_repository():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise  # OperationFailed()
    proxy = self._convert_proxy(proxy)
    try:
        # Bug fix: class name was misspelled "AssetRespositorySession",
        # which would always raise AttributeError here.
        session = sessions.AssetRepositorySession(proxy, runtime=self._runtime)
    except AttributeError:
        raise  # OperationFailed()
    return session
def get_asset_repository_assignment_session(self, proxy):
    """Gets the session for assigning asset to repository mappings.
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetRepositoryAssignmentSession) - an
            AssetRepositoryAsignmentSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_repository_assignment()
            is false
    compliance: optional - This method must be implemented if
            supports_asset_repository_assignment() is true.
    """
    if not self.supports_asset_repository_assignment():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise  # OperationFailed()
    proxy = self._convert_proxy(proxy)
    try:
        # Bug fix: class name was misspelled
        # "AssetRespositoryAssignmentSession", guaranteeing AttributeError.
        session = sessions.AssetRepositoryAssignmentSession(proxy, runtime=self._runtime)
    except AttributeError:
        raise  # OperationFailed()
    return session
def get_asset_smart_repository_session(self, repository_id, proxy):
    """Return an asset smart repository session for the given
    repository.

    arg:    repository_id (osid.id.Id): the Id of the repository
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetSmartRepositorySession) - an
            AssetSmartRepositorySession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_smart_repository() false
    compliance: optional - This method must be implemented if
            supports_asset_smart_repository() is true.
    """
    if not repository_id:
        raise NullArgument()
    if not self.supports_asset_smart_repository():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed('import error')
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetSmartRepositorySession(repository_id,
                                                    proxy,
                                                    runtime=self._runtime)
    except AttributeError:
        raise OperationFailed('attribute error')
def get_asset_temporal_session(self, proxy):
    """Return the session for retrieving temporal coverage of an
    asset.

    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetTemporalSession) - an
            AssetTemporalSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_temporal() is false
    compliance: optional - This method must be implemented if
            supports_asset_temporal() is true.
    """
    if not self.supports_asset_temporal():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetTemporalSession(proxy,
                                             runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_asset_temporal_session_for_repository(self, repository_id, proxy):
    """Return the asset temporal coverage session for the given
    repository.

    arg:    repository_id (osid.id.Id): the Id of the repository
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetTemporalSession) - an
            AssetTemporalSession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_temporal() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
            supports_asset_temporal() and
            supports_visible_federation() are true.
    """
    if not repository_id:
        raise NullArgument()
    if not self.supports_asset_temporal():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed('import error')
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetTemporalSession(repository_id,
                                             proxy,
                                             runtime=self._runtime)
    except AttributeError:
        raise OperationFailed('attribute error')
def get_asset_temporal_assignment_session(self, proxy):
    """Return the session for assigning temporal coverage to an
    asset.

    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetTemporalAssignmentSession) - an
            AssetTemporalAssignmentSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_temporal_assignment() is
            false
    compliance: optional - This method must be implemented if
            supports_asset_temporal_assignment() is true.
    """
    if not self.supports_asset_temporal_assignment():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetTemporalAssignmentSession(
            proxy,
            runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_asset_temporal_assignment_session_for_repository(self, repository_id, proxy):
    """Return the asset temporal coverage assignment session for the
    given repository.

    arg:    repository_id (osid.id.Id): the Id of the repository
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetTemporalAssignmentSession) - an
            AssetTemporalAssignmentSession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_temporal_assignment() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
            supports_asset_temporal_assignment() and
            supports_visible_federation() are true.
    """
    if not repository_id:
        raise NullArgument()
    if not self.supports_asset_temporal_assignment():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed('import error')
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetTemporalAssignmentSession(repository_id,
                                                       proxy,
                                                       runtime=self._runtime)
    except AttributeError:
        raise OperationFailed('attribute error')
def get_asset_spatial_session(self, proxy):
    """Gets the session for retrieving spatial coverage of an asset.
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetSpatialSession) - an
            AssetSpatialSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_spatial_assets() is false
    compliance: optional - This method must be implemented if
            supports_spatial_assets() is true.
    """
    # Consistency fix: this previously called supports_spatial_asset(),
    # which matches neither the docstring nor the sibling
    # *_for_repository method; supports_asset_spatial() is the capability
    # checked elsewhere in this class. TODO confirm against the profile.
    if not self.supports_asset_spatial():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise  # OperationFailed()
    proxy = self._convert_proxy(proxy)
    try:
        session = sessions.AssetSpatialSession(proxy, runtime=self._runtime)
    except AttributeError:
        raise  # OperationFailed()
    return session
def get_asset_spatial_session_for_repository(self, repository_id, proxy):
    """Return the asset spatial coverage session for the given
    repository.

    arg:    repository_id (osid.id.Id): the Id of the repository
    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetSpatialSession) - an
            AssetSpatialSession
    raise:  NotFound - repository_id not found
    raise:  NullArgument - repository_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_spatial() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
            supports_asset_spatial() and
            supports_visible_federation() are true.
    """
    if not repository_id:
        raise NullArgument()
    if not self.supports_asset_spatial() or not self.supports_visible_federation():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed('import error')
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetSpatialSession(repository_id,
                                            proxy,
                                            runtime=self._runtime)
    except AttributeError:
        raise OperationFailed('attribute error')
def get_asset_spatial_assignment_session(self, proxy):
    """Return the session for assigning spatial coverage to an asset.

    arg     proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetSpatialAssignmentSession) - an
            AssetSpatialAssignmentSession
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_asset_spatial_assignment() is
            false
    compliance: optional - This method must be implemented if
            supports_asset_spatial_assignment() is true.
    """
    if not self.supports_asset_spatial_assignment():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:  # OperationFailed()
        raise
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.AssetSpatialAssignmentSession(
            proxy,
            runtime=self._runtime)
    except AttributeError:  # OperationFailed()
        raise
def get_asset_spatial_assignment_session_for_repository(self, repository_id, proxy):
"""Gets the session for assigning spatial coverage of an asset for
the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetSpatialAssignmentSession) - an
AssetSpatialAssignmentSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_spatial_assignment() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_spatial_assignment() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_spatial_assignment() or not self.supports_visible_federation():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
proxy = self._convert_proxy(proxy)
try:
session = sessions.AssetSpatialAssignmentSession(repository_id, proxy, runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_asset_composition_session(self, proxy):
"""Gets the session for retrieving asset compositions.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetCompositionSession) - an
AssetCompositionSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_composition() is false
compliance: optional - This method must be implemented if
supports_asset_composition() is true.
"""
if not self.supports_asset_composition():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.AssetCompositionSession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_asset_composition_design_session(self, proxy):
"""Gets the session for creating asset compositions.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetCompositionDesignSession) - an
AssetCompositionDesignSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_composition_design() is
false
compliance: optional - This method must be implemented if
supports_asset_composition_design() is true.
"""
if not self.supports_asset_composition_design():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
proxy = self._convert_proxy(proxy)
try:
session = sessions.AssetCompositionDesignSession(proxy, runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session
def get_composition_lookup_session(self, proxy):
"""Gets the OsidSession associated with the composition lookup
service.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionLookupSession) - the new
CompositionLookupSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_lookup() is false
compliance: optional - This method must be implemented if
supports_composition_lookup() is true.
"""
if not self.supports_composition_lookup():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionLookupSession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_lookup_session_for_repository(self, repository_id, proxy):
"""Gets the OsidSession associated with the composition lookup
service for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionLookupSession) - the new
CompositionLookupSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_lookup() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_composition_lookup() and
supports_visible_federation() are true.
"""
if repository_id is None:
raise NullArgument()
if not self.supports_composition_lookup():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionLookupSession(repository_id, proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_query_session(self, proxy):
"""Gets a composition query session.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionQuerySession) - a
CompositionQuerySession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_query() is false
compliance: optional - This method must be implemented if
supports_composition_query() is true.
"""
if not self.supports_composition_query():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionQuerySession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_query_session_for_repository(self, repository_id, proxy):
"""Gets a composition query session for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionQuerySession) - a
CompositionQuerySession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_query() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_composition_query() and
supports_visible_federation() are true.
"""
if repository_id is None:
raise NullArgument()
if not self.supports_composition_query():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionQuerySession(repository_id, proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_search_session(self, proxy):
"""Gets a composition search session.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSearchSession) - a
CompositionSearchSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_search() is false
compliance: optional - This method must be implemented if
supports_composition_search() is true.
"""
if not self.supports_composition_search():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionSearchSession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_search_session_for_repository(self, repository_id, proxy):
"""Gets a composition search session for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSearchSession) - a
CompositionSearchSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_search() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_composition_search() and
supports_visible_federation() are true.
"""
if repository_id is None:
raise NullArgument()
if not self.supports_composition_search() or not self.supports_visible_federation():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionSearchSession(repository_id, proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_admin_session(self, proxy):
"""Gets a composition administration session for creating, updating
and deleting compositions.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionAdminSession) - a
CompositionAdminSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_admin() is false
compliance: optional - This method must be implemented if
supports_composition_admin() is true.
"""
if not self.supports_composition_admin():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionAdminSession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_admin_session_for_repository(self, repository_id, proxy):
"""Gets a composiiton administrative session for the given
repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionAdminSession) - a
CompositionAdminSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_admin() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_composition_admin() and
supports_visible_federation() are true.
"""
if repository_id is None:
raise NullArgument()
if not self.supports_composition_admin() or not self.supports_visible_federation():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionSearchSession(repository_id, proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_notification_session(self, composition_receiver, proxy):
"""Gets the notification session for notifications pertaining to
composition changes.
arg: composition_receiver
(osid.repository.CompositionReceiver): the notification
callback
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionNotificationSession) - a
CompositionNotificationSession
raise: NullArgument - composition_receiver is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_notification() is
false
compliance: optional - This method must be implemented if
supports_composition_notification() is true.
"""
if composition_receiver is None:
raise NullArgument()
if not self.supports_composition_notification():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionNotificationSession(composition_receiver, proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_notification_session_for_repository(self, composition_receiver, repository_id, proxy):
"""Gets the composition notification session for the given
repository.
arg: composition_receiver
(osid.repository.CompositionReceiver): the notification
callback
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionNotificationSession) - a
CompositionNotificationSession
raise: NotFound - repository_id not found
raise: NullArgument - composition_receiver or repository_id is
null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_notification() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_composition_notfication() and
supports_visible_federation() are true.
"""
if composition_receiver is None or repository_id is None:
raise NullArgument()
if not self.supports_composition_notification():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionNotificationSession(composition_receiver,
repository_id,
proxy,
runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_repository_session(self, proxy):
"""Gets the session for retrieving composition to repository
mappings.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionRepositorySession) - a
CompositionRepositorySession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_repository() is
false
compliance: optional - This method must be implemented if
supports_composition_repository() is true.
"""
if not self.supports_composition_repository():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionRepositorySession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_repository_assignment_session(self, proxy):
"""Gets the session for assigning composition to repository
mappings.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionRepositoryAssignmentSession)
- a CompositionRepositoryAssignmentSession
raise: OperationFailed - unable to complete request
raise: Unimplemented -
supports_composition_repository_assignment() is false
compliance: optional - This method must be implemented if
supports_composition_repository_assignment() is
true.
"""
if not self.supports_composition_repository_assignment():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionRepositoryAssignmentSession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_composition_smart_repository_session(self, repository_id, proxy):
"""Gets a composition smart repository session for the given
repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSmartRepositorySession) - a
CompositionSmartRepositorySession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_composition_smart_repository()
false
compliance: optional - This method must be implemented if
supports_composition_smart_repository() is true.
"""
if repository_id is None:
raise NullArgument()
if not self.supports_composition_smart_repository():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.CompositionSmartRepositorySession(repository_id, proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_repository_lookup_session(self, proxy, *args, **kwargs):
"""Gets the repository lookup session.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryLookupSession) - a
RepositoryLookupSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_repository_lookup() is false
compliance: optional - This method must be implemented if
supports_repository_lookup() is true.
"""
if not self.supports_repository_lookup():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.RepositoryLookupSession(proxy, runtime=self._runtime, **kwargs)
except AttributeError:
raise # OperationFailed()
return session
def get_repository_query_session(self, proxy):
"""Gets the repository query session.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryQuerySession) - a
RepositoryQuerySession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_repository_query() is false
compliance: optional - This method must be implemented if
supports_repository_query() is true.
"""
if not self.supports_repository_query():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.RepositoryQuerySession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_repository_search_session(self, proxy):
"""Gets the repository search session.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositorySearchSession) - a
RepositorySearchSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_repository_search() is false
compliance: optional - This method must be implemented if
supports_repository_search() is true.
"""
if not self.supports_repository_search():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.RepositorySearchSession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_repository_admin_session(self, proxy):
"""Gets the repository administrative session for creating,
updating and deleteing repositories.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryAdminSession) - a
RepositoryAdminSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_repository_admin() is false
compliance: optional - This method must be implemented if
supports_repository_admin() is true.
"""
if not self.supports_repository_admin():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.RepositoryAdminSession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_repository_notification_session(self, repository_receiver, proxy):
"""Gets the notification session for subscribing to changes to a
repository.
arg: repository_receiver
(osid.repository.RepositoryReceiver): the notification
callback
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryNotificationSession) - a
RepositoryNotificationSession
raise: NullArgument - repository_receiver is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_repository_notification() is
false
compliance: optional - This method must be implemented if
supports_repository_notification() is true.
"""
if repository_receiver is None:
raise NullArgument()
if not self.supports_repository_notification():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.RepositoryNotificationSession(repository_receiver, proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_repository_hierarchy_session(self, proxy):
"""Gets the repository hierarchy traversal session.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryHierarchySession) - a
RepositoryHierarchySession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_repository_hierarchy() is false
compliance: optional - This method must be implemented if
supports_repository_hierarchy() is true.
"""
if not self.supports_repository_hierarchy():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.RepositoryHierarchySession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
def get_repository_hierarchy_design_session(self, proxy):
"""Gets the repository hierarchy design session.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryHierarchyDesignSession) - a
RepostoryHierarchyDesignSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_repository_hierarchy_design()
is false
compliance: optional - This method must be implemented if
supports_repository_hierarchy_design() is true.
"""
if not self.supports_repository_hierarchy_design():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise # OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.RepositoryHierarchyDesignSession(proxy, runtime=self._runtime)
except AttributeError:
raise # OperationFailed()
return session
    def get_repository_batch_proxy_manager(self, proxy):
        """Gets a RepositoryBatchManager.
        arg proxy (osid.proxy.Proxy): a proxy
        return: (osid.repository.batch.RepositoryBatchManager) - a
                RepostoryBatchManager
        raise: OperationFailed - unable to complete request
        raise: Unimplemented - supports_repository_batch() is false
        compliance: optional - This method must be implemented if
                supports_repository_batch() is true.
        """
        # The batch sub-package is not provided by this implementation, so
        # the method unconditionally reports itself unimplemented.
        raise Unimplemented()
    def get_repository_rules_proxy_manager(self, proxy):
        """Gets a RepositoryRulesManager.
        arg proxy (osid.proxy.Proxy): a proxy
        return: (osid.repository.rules.RepositoryRulesManager) - a
                RepostoryRulesManager
        raise: OperationFailed - unable to complete request
        raise: Unimplemented - supports_repository_rules() is false
        compliance: optional - This method must be implemented if
                supports_repository_rules() is true.
        """
        # The rules sub-package is not provided by this implementation, so
        # the method unconditionally reports itself unimplemented.
        raise Unimplemented()
| 40.646819
| 113
| 0.627984
| 12,085
| 129,704
| 6.59007
| 0.01928
| 0.065795
| 0.02461
| 0.028126
| 0.973946
| 0.954483
| 0.936038
| 0.900353
| 0.889153
| 0.870444
| 0
| 0.000023
| 0.316097
| 129,704
| 3,190
| 114
| 40.659561
| 0.897796
| 0.492776
| 0
| 0.801943
| 0
| 0
| 0.027124
| 0.015591
| 0
| 0
| 0
| 0
| 0
| 1
| 0.104634
| false
| 0
| 0.149477
| 0
| 0.357997
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f0dfbf64f4dcadb37565d6f2f78f5ba3f6e07f8
| 3,221
|
py
|
Python
|
tests/test_evaluation/test_eval_utils.py
|
ykk648/mmhuman3d
|
26af92bcf6abbe1855e1a8a48308621410f9c047
|
[
"Apache-2.0"
] | 472
|
2021-12-03T03:12:55.000Z
|
2022-03-31T01:33:13.000Z
|
tests/test_evaluation/test_eval_utils.py
|
ykk648/mmhuman3d
|
26af92bcf6abbe1855e1a8a48308621410f9c047
|
[
"Apache-2.0"
] | 127
|
2021-12-03T05:00:14.000Z
|
2022-03-31T13:47:33.000Z
|
tests/test_evaluation/test_eval_utils.py
|
ykk648/mmhuman3d
|
26af92bcf6abbe1855e1a8a48308621410f9c047
|
[
"Apache-2.0"
] | 37
|
2021-12-03T03:23:22.000Z
|
2022-03-31T08:41:58.000Z
|
import numpy as np
import pytest
from mmhuman3d.core.evaluation import (
keypoint_3d_auc,
keypoint_3d_pck,
keypoint_accel_error,
keypoint_mpjpe,
vertice_pve,
)
def test_accel_error():
    """keypoint_accel_error must be 0 when output equals target,
    both with an explicit all-True mask and with the mask omitted."""
    target = np.random.rand(10, 5, 3)
    output = np.copy(target)
    mask = np.ones((output.shape[0]), dtype=bool)
    error = keypoint_accel_error(output, target, mask)
    np.testing.assert_almost_equal(error, 0)
    error = keypoint_accel_error(output, target)
    np.testing.assert_almost_equal(error, 0)


# Backward-compatible alias: the original name was misspelled ("tets_"),
# which prevented pytest from collecting the test at all.
tets_accel_error = test_accel_error
def test_keypoint_mpjpe():
    """MPJPE must be 0 for identical inputs under every supported
    alignment, reject unknown alignments, and recover a rigid rotation
    only under Procrustes alignment."""
    target = np.random.rand(2, 5, 3)
    output = np.copy(target)
    mask = np.ones((output.shape[0], output.shape[1]), dtype=bool)
    with pytest.raises(ValueError):
        _ = keypoint_mpjpe(output, target, mask, alignment='norm')
    error = keypoint_mpjpe(output, target, mask, alignment='none')
    np.testing.assert_almost_equal(error, 0)
    error = keypoint_mpjpe(output, target, mask, alignment='scale')
    np.testing.assert_almost_equal(error, 0)
    error = keypoint_mpjpe(output, target, mask, alignment='procrustes')
    np.testing.assert_almost_equal(error, 0)
    # A rotated output differs without alignment...
    R = np.array([[1, 0, 0], [0, -1, 0], [0, 0, -1]])
    output = np.dot(target, R)
    error = keypoint_mpjpe(output, target, mask, alignment='none')
    assert error > 1e-10
    # ...but Procrustes alignment removes the rotation.
    R = np.array([[1, 0, 0], [0, -1, 0], [0, 0, -1]])
    output = np.dot(target, R)
    error = keypoint_mpjpe(output, target, mask, alignment='procrustes')
    np.testing.assert_almost_equal(error, 0)


# Backward-compatible alias: the original name was misspelled
# ("tets_keypoinyt_"), which prevented pytest from collecting the test.
tets_keypoinyt_mpjpe = test_keypoint_mpjpe
def test_vertice_pve():
    """Per-vertex error must be 0 when output vertices equal the target."""
    target = np.random.rand(2, 6890, 3)
    output = np.copy(target)
    error = vertice_pve(output, target)
    np.testing.assert_almost_equal(error, 0)


# Backward-compatible alias: the original name was misspelled
# ("tets_keypoinyt_"), which prevented pytest from collecting the test.
tets_keypoinyt_pve = test_vertice_pve
def test_keypoint_3d_pck():
    """3D PCK: rejects unknown alignments, is 100 for a perfect match,
    and each alignment mode compensates for its own distortion."""
    target = np.random.rand(2, 5, 3) * 1000
    output = np.copy(target)
    mask = np.ones(output.shape[:2], dtype=bool)

    # Unknown alignment name must raise.
    with pytest.raises(ValueError):
        _ = keypoint_3d_pck(output, target, mask, alignment='norm')

    # Identical prediction: all keypoints are correct.
    pck = keypoint_3d_pck(output, target, mask, alignment='none')
    np.testing.assert_almost_equal(pck, 100)

    # Push one keypoint far away: exactly one of ten is now wrong.
    output[0, 0, :] = target[0, 0, :] + 1000
    pck = keypoint_3d_pck(output, target, mask, alignment='none')
    np.testing.assert_almost_equal(pck, 90, 5)

    # Uniform scaling is undone by 'scale' alignment.
    output = target * 2
    pck = keypoint_3d_pck(output, target, mask, alignment='scale')
    np.testing.assert_almost_equal(pck, 100)

    # A constant offset is undone by Procrustes alignment.
    output = target + 2
    pck = keypoint_3d_pck(output, target, mask, alignment='procrustes')
    np.testing.assert_almost_equal(pck, 100)
def test_keypoint_3d_auc():
    """3D AUC: rejects unknown alignments and reaches the maximum value
    (30/31 * 100) for predictions that align perfectly."""
    target = np.random.rand(2, 5, 3) * 1000
    output = np.copy(target)
    mask = np.ones(output.shape[:2], dtype=bool)

    # Unknown alignment name must raise.
    with pytest.raises(ValueError):
        _ = keypoint_3d_auc(output, target, mask, alignment='norm')

    # Identical prediction.
    auc = keypoint_3d_auc(output, target, mask, alignment='none')
    np.testing.assert_almost_equal(auc, 30 / 31 * 100)

    # Uniform scaling is undone by 'scale' alignment.
    output = target * 2
    auc = keypoint_3d_auc(output, target, mask, alignment='scale')
    np.testing.assert_almost_equal(auc, 30 / 31 * 100)

    # A constant offset is undone by Procrustes alignment.
    output = target + 2000
    auc = keypoint_3d_auc(output, target, mask, alignment='procrustes')
    np.testing.assert_almost_equal(auc, 30 / 31 * 100)
| 31.578431
| 72
| 0.674635
| 462
| 3,221
| 4.530303
| 0.132035
| 0.126135
| 0.122312
| 0.179169
| 0.867176
| 0.843287
| 0.807453
| 0.768753
| 0.728141
| 0.701863
| 0
| 0.047274
| 0.185657
| 3,221
| 101
| 73
| 31.891089
| 0.750667
| 0
| 0
| 0.520548
| 0
| 0
| 0.02701
| 0
| 0
| 0
| 0
| 0
| 0.205479
| 1
| 0.068493
| false
| 0
| 0.041096
| 0
| 0.109589
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f1b8851904b0f2b3109fafa7b11434ba33c6676
| 3,507
|
py
|
Python
|
dependencies/includedyaml/dumper.py
|
vvbandeira/OpenLane
|
0034aa860db4880da8015f63f3ba340e4aac324d
|
[
"Apache-2.0"
] | 1
|
2020-07-14T08:40:59.000Z
|
2020-07-14T08:40:59.000Z
|
dependencies/includedyaml/dumper.py
|
vvbandeira/OpenLane
|
0034aa860db4880da8015f63f3ba340e4aac324d
|
[
"Apache-2.0"
] | null | null | null |
dependencies/includedyaml/dumper.py
|
vvbandeira/OpenLane
|
0034aa860db4880da8015f63f3ba340e4aac324d
|
[
"Apache-2.0"
] | null | null | null |
__all__ = ["BaseDumper", "SafeDumper", "Dumper"]
from .emitter import *
from .serializer import *
from .representer import *
from .resolver import *
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
    """YAML dumper built from the *base* representer/resolver, which know
    only the primitive YAML types and no implicit tag resolution."""

    def __init__(self, stream, default_style=None, default_flow_style=False,
                 canonical=None, indent=None, width=None, allow_unicode=None,
                 line_break=None, encoding=None, explicit_start=None,
                 explicit_end=None, version=None, tags=None, sort_keys=True):
        # Each cooperating base class is initialized explicitly with its
        # slice of the keyword arguments (the classes do not chain super()).
        Emitter.__init__(self, stream, canonical=canonical, indent=indent,
                         width=width, allow_unicode=allow_unicode,
                         line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end, version=version,
                            tags=tags)
        # Consistency fix: this class inherits BaseRepresenter and
        # BaseResolver, so initialize those bases directly (the previous
        # Representer.__init__ / Resolver.__init__ calls resolved to the
        # same inherited __init__ implementations, so behavior is
        # unchanged, but naming the actual bases is clearer and safer).
        BaseRepresenter.__init__(self, default_style=default_style,
                                 default_flow_style=default_flow_style,
                                 sort_keys=sort_keys)
        BaseResolver.__init__(self)
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
    """YAML dumper that uses SafeRepresenter, limiting output to standard
    YAML tags (no arbitrary Python objects)."""

    def __init__(self, stream, default_style=None, default_flow_style=False,
                 canonical=None, indent=None, width=None, allow_unicode=None,
                 line_break=None, encoding=None, explicit_start=None,
                 explicit_end=None, version=None, tags=None, sort_keys=True):
        # Each cooperating base class is initialized explicitly with its
        # slice of the keyword arguments (the classes do not chain super()).
        Emitter.__init__(self, stream, canonical=canonical, indent=indent,
                         width=width, allow_unicode=allow_unicode,
                         line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end, version=version,
                            tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                                 default_flow_style=default_flow_style,
                                 sort_keys=sort_keys)
        Resolver.__init__(self)
class Dumper(Emitter, Serializer, Representer, Resolver):
    """The default full-featured YAML dumper (Representer + Resolver)."""

    def __init__(self, stream, default_style=None, default_flow_style=False,
                 canonical=None, indent=None, width=None, allow_unicode=None,
                 line_break=None, encoding=None, explicit_start=None,
                 explicit_end=None, version=None, tags=None, sort_keys=True):
        # Each cooperating base class is initialized explicitly with its
        # slice of the keyword arguments (the classes do not chain super()).
        Emitter.__init__(self, stream, canonical=canonical, indent=indent,
                         width=width, allow_unicode=allow_unicode,
                         line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end, version=version,
                            tags=tags)
        Representer.__init__(self, default_style=default_style,
                             default_flow_style=default_flow_style,
                             sort_keys=sort_keys)
        Resolver.__init__(self)
| 25.230216
| 69
| 0.546906
| 313
| 3,507
| 5.693291
| 0.124601
| 0.06734
| 0.080808
| 0.070707
| 0.856902
| 0.856902
| 0.856902
| 0.856902
| 0.856902
| 0.856902
| 0
| 0
| 0.380097
| 3,507
| 138
| 70
| 25.413043
| 0.819687
| 0
| 0
| 0.862595
| 0
| 0
| 0.007414
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022901
| false
| 0
| 0.030534
| 0
| 0.076336
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f2ee5424ab7e63644d258c2959bdf3f97ab95f6
| 514
|
py
|
Python
|
src/env_paths/__init__.py
|
atu4403/env-paths
|
a329c66865c0283fc03589c2de7d8a1c39bd2a1e
|
[
"MIT"
] | null | null | null |
src/env_paths/__init__.py
|
atu4403/env-paths
|
a329c66865c0283fc03589c2de7d8a1c39bd2a1e
|
[
"MIT"
] | 2
|
2021-11-11T19:19:46.000Z
|
2021-11-12T00:01:47.000Z
|
src/env_paths/__init__.py
|
atu4403/env-paths
|
a329c66865c0283fc03589c2de7d8a1c39bd2a1e
|
[
"MIT"
] | null | null | null |
from .main import *
def data_dir(name: str, options: dict = None) -> str:
    """Return the platform-appropriate data directory for *name*.

    ``options`` is forwarded to ``env_paths``; the default is ``None``
    instead of a shared mutable ``{}`` (mutable-default pitfall) and is
    normalized to an empty dict before the call, so behavior is unchanged.
    """
    return env_paths(name, {} if options is None else options)["data"]
def config_dir(name: str, options: dict = None) -> str:
    """Return the platform-appropriate config directory for *name*.

    ``options`` is forwarded to ``env_paths``; the default is ``None``
    instead of a shared mutable ``{}`` (mutable-default pitfall) and is
    normalized to an empty dict before the call, so behavior is unchanged.
    """
    return env_paths(name, {} if options is None else options)["config"]
def cache_dir(name: str, options: dict = None) -> str:
    """Return the platform-appropriate cache directory for *name*.

    ``options`` is forwarded to ``env_paths``; the default is ``None``
    instead of a shared mutable ``{}`` (mutable-default pitfall) and is
    normalized to an empty dict before the call, so behavior is unchanged.
    """
    return env_paths(name, {} if options is None else options)["cache"]
def log_dir(name: str, options: dict = None) -> str:
    """Return the platform-appropriate log directory for *name*.

    ``options`` is forwarded to ``env_paths``; the default is ``None``
    instead of a shared mutable ``{}`` (mutable-default pitfall) and is
    normalized to an empty dict before the call, so behavior is unchanged.
    """
    return env_paths(name, {} if options is None else options)["log"]
def temp_dir(name: str, options: dict = None) -> str:
    """Return the platform-appropriate temp directory for *name*.

    ``options`` is forwarded to ``env_paths``; the default is ``None``
    instead of a shared mutable ``{}`` (mutable-default pitfall) and is
    normalized to an empty dict before the call, so behavior is unchanged.
    """
    return env_paths(name, {} if options is None else options)["temp"]
| 23.363636
| 53
| 0.638132
| 73
| 514
| 4.356164
| 0.232877
| 0.110063
| 0.157233
| 0.267296
| 0.77044
| 0.77044
| 0.77044
| 0.77044
| 0.77044
| 0.77044
| 0
| 0
| 0.182879
| 514
| 21
| 54
| 24.47619
| 0.757143
| 0
| 0
| 0
| 0
| 0
| 0.042802
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0.090909
| 0.454545
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
6f6beb3d4c6797b924561498c3fc9def428d4570
| 167
|
py
|
Python
|
fastpsm/psm.py
|
ryanxjhan/fastpsm
|
6b9c1d1bba9db50ac090d508be78c31cde8d8096
|
[
"MIT"
] | null | null | null |
fastpsm/psm.py
|
ryanxjhan/fastpsm
|
6b9c1d1bba9db50ac090d508be78c31cde8d8096
|
[
"MIT"
] | null | null | null |
fastpsm/psm.py
|
ryanxjhan/fastpsm
|
6b9c1d1bba9db50ac090d508be78c31cde8d8096
|
[
"MIT"
] | null | null | null |
import fastpsm.internal.tabular
def setup():
    """Delegate setup to the internal tabular backend and return its result."""
    backend = fastpsm.internal.tabular
    return backend.setup()
def compare_methods():
return fastpsm.internal.tabular.compare_methods()
| 23.857143
| 53
| 0.778443
| 20
| 167
| 6.4
| 0.45
| 0.351563
| 0.515625
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113772
| 167
| 7
| 53
| 23.857143
| 0.864865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
48a4d1ffd6b3ddf59f242fa87ad15aee1dd08eb2
| 11,741
|
py
|
Python
|
coremltools/test/xgboost/test_boosted_trees_classifier.py
|
Gerzer/coremltools
|
47e2010a68668bd1960dca040f5f87c0e66a0cbd
|
[
"BSD-3-Clause"
] | 65
|
2019-10-02T09:56:22.000Z
|
2022-03-16T22:41:14.000Z
|
coremltools/test/xgboost/test_boosted_trees_classifier.py
|
Gerzer/coremltools
|
47e2010a68668bd1960dca040f5f87c0e66a0cbd
|
[
"BSD-3-Clause"
] | 51
|
2020-01-13T07:54:13.000Z
|
2022-03-17T09:11:56.000Z
|
coremltools/test/xgboost/test_boosted_trees_classifier.py
|
Gerzer/coremltools
|
47e2010a68668bd1960dca040f5f87c0e66a0cbd
|
[
"BSD-3-Clause"
] | 16
|
2020-03-06T09:26:03.000Z
|
2022-02-05T05:35:05.000Z
|
# Copyright (c) 2017, Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
import unittest
import tempfile
import json
from sklearn.ensemble import GradientBoostingClassifier
from coremltools.converters import sklearn as skl_converter
from coremltools.proto import Model_pb2
from coremltools.proto import FeatureTypes_pb2
from coremltools._deps import HAS_XGBOOST
from coremltools._deps import HAS_SKLEARN
if HAS_XGBOOST:
import xgboost
from coremltools.converters import xgboost as xgb_converter
@unittest.skipIf(not HAS_SKLEARN, 'Missing sklearn. Skipping tests.')
class GradientBoostingBinaryClassifierScikitTest(unittest.TestCase):
"""
Unit test class for testing scikit-learn converter.
"""
@classmethod
def setUpClass(self):
"""
Set up the unit test by loading the dataset and training a model.
"""
from sklearn.datasets import load_boston
scikit_data = load_boston()
scikit_model = GradientBoostingClassifier(random_state = 1)
target = scikit_data['target'] > scikit_data['target'].mean()
scikit_model.fit(scikit_data['data'], target)
# Save the data and the model
self.scikit_data = scikit_data
self.scikit_model = scikit_model
def test_conversion(self):
input_names = self.scikit_data.feature_names
output_name = 'target'
spec = skl_converter.convert(self.scikit_model, input_names, 'target').get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertIsNotNone(spec.treeEnsembleClassifier)
# Test the interface class
self.assertEqual(spec.description.predictedFeatureName,
'target')
# Test the inputs and outputs
self.assertEqual(len(spec.description.output), 2)
self.assertEqual(spec.description.output[0].name, 'target')
self.assertEqual(spec.description.output[0].type.WhichOneof('Type'),
'int64Type')
for input_type in spec.description.input:
self.assertEqual(input_type.type.WhichOneof('Type'),
'doubleType')
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
# Test the linear regression parameters.
tr = spec.pipelineClassifier.pipeline.models[1].treeEnsembleClassifier.treeEnsemble
self.assertIsNotNone(tr)
self.assertEqual(len(tr.nodes), 1416)
def test_conversion_bad_inputs(self):
# Error on converting an untrained model
with self.assertRaises(Exception):
model = GradientBoostingClassifier()
spec = skl_converter.convert(model, 'data', 'out')
# Check the expected class during covnersion.
from sklearn.preprocessing import OneHotEncoder
with self.assertRaises(Exception):
model = OneHotEncoder()
spec = skl_converter.convert(model, 'data', 'out')
@unittest.skipIf(not HAS_SKLEARN, 'Missing sklearn. Skipping tests.')
class GradientBoostingMulticlassClassifierScikitTest(unittest.TestCase):
"""
Unit test class for testing scikit-learn converter.
"""
@classmethod
def setUpClass(self):
"""
Set up the unit test by loading the dataset and training a model.
"""
from sklearn.datasets import load_boston
import numpy as np
scikit_data = load_boston()
scikit_model = GradientBoostingClassifier(random_state = 1)
t = scikit_data.target
target = np.digitize(t, np.histogram(t)[1]) - 1
scikit_model.fit(scikit_data.data, target)
self.target = target
# Save the data and the model
self.scikit_data = scikit_data
self.scikit_model = scikit_model
def test_conversion(self):
input_names = self.scikit_data.feature_names
output_name = 'target'
spec = skl_converter.convert(self.scikit_model, input_names, 'target').get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertEqual(spec.description.predictedFeatureName, 'target')
# Test the inputs and outputs
self.assertEqual(len(spec.description.output), 2)
self.assertEqual(spec.description.output[0].name, 'target')
self.assertEqual(spec.description.output[0].type.WhichOneof('Type'), 'int64Type')
for input_type in spec.description.input:
self.assertEqual(input_type.type.WhichOneof('Type'), 'doubleType')
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEqual(len(spec.pipelineClassifier.pipeline.models), 2)
tr = spec.pipelineClassifier.pipeline.models[-1].treeEnsembleClassifier.treeEnsemble
self.assertIsNotNone(tr)
self.assertEqual(len(tr.nodes), 15056)
def test_conversion_bad_inputs(self):
# Error on converting an untrained model
with self.assertRaises(Exception):
model = GradientBoostingClassifier()
spec = skl_converter.convert(model, 'data', 'out')
# Check the expected class during covnersion.
from sklearn.preprocessing import OneHotEncoder
with self.assertRaises(Exception):
model = OneHotEncoder()
spec = skl_converter.convert(model, 'data', 'out')
@unittest.skipIf(not HAS_SKLEARN, 'Missing sklearn. Skipping tests.')
@unittest.skipIf(not HAS_XGBOOST, 'Skipping, no xgboost')
class GradientBoostingBinaryClassifierXGboostTest(unittest.TestCase):
"""
Unit test class for testing xgboost converter.
"""
@classmethod
def setUpClass(self):
"""
Set up the unit test by loading the dataset and training a model.
"""
from sklearn.datasets import load_boston
scikit_data = load_boston()
self.xgb_model = xgboost.XGBClassifier()
target = scikit_data['target'] > scikit_data['target'].mean()
self.xgb_model.fit(scikit_data['data'], target)
# Save the data and the model
self.scikit_data = scikit_data
def test_conversion(self):
input_names = self.scikit_data.feature_names
output_name = 'target'
spec = xgb_converter.convert(self.xgb_model, input_names, output_name, mode="classifier").get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertIsNotNone(spec.treeEnsembleClassifier)
# Test the interface class
self.assertEqual(spec.description.predictedFeatureName,
output_name)
# Test the inputs and outputs
self.assertEqual(len(spec.description.output), 2)
self.assertEqual(spec.description.output[0].name, output_name)
self.assertEqual(spec.description.output[0].type.WhichOneof('Type'),
'int64Type')
for input_type in spec.description.input:
self.assertEqual(input_type.type.WhichOneof('Type'),
'doubleType')
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
# Test the linear regression parameters.
tr = spec.treeEnsembleClassifier.treeEnsemble
self.assertIsNotNone(tr)
def test_conversion_bad_inputs(self):
# Error on converting an untrained model
with self.assertRaises(Exception):
model = xgboost.XGBClassifier()
spec = xgb_converter.convert(model, 'data', 'out', mode="classifier")
# Check the expected class during covnersion.
with self.assertRaises(Exception):
model = xgboost.XGBRegressor()
spec = xgb_converter.convert(model, 'data', 'out', mode="classifier")
@unittest.skipIf(not HAS_SKLEARN, 'Missing sklearn. Skipping tests.')
@unittest.skipIf(not HAS_XGBOOST, 'Skipping, no xgboost')
class GradientBoostingMulticlassClassifierXGboostTest(unittest.TestCase):
"""
Unit test class for testing xgboost converter.
"""
@classmethod
def setUpClass(self):
"""
Set up the unit test by loading the dataset and training a model.
"""
from sklearn.datasets import load_boston
import numpy as np
scikit_data = load_boston()
t = scikit_data.target
target = np.digitize(t, np.histogram(t)[1]) - 1
dtrain = xgboost.DMatrix(scikit_data.data, label=target, feature_names=scikit_data.feature_names)
self.xgb_model = xgboost.train({}, dtrain)
self.target = target
# Save the data and the model
self.scikit_data = scikit_data
self.n_classes = len(np.unique(self.target))
def test_conversion(self):
input_names = self.scikit_data.feature_names
output_name = 'target'
spec = xgb_converter.convert(
self.xgb_model,
input_names,
output_name,
mode="classifier",
n_classes=self.n_classes,
).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertEqual(spec.description.predictedFeatureName, output_name)
# Test the inputs and outputs
self.assertEqual(len(spec.description.output), 2)
self.assertEqual(spec.description.output[0].name, output_name)
self.assertEqual(spec.description.output[0].type.WhichOneof('Type'), 'int64Type')
for input_type in spec.description.input:
self.assertEqual(input_type.type.WhichOneof('Type'), 'doubleType')
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
# Test the linear regression parameters.
tr = spec.treeEnsembleClassifier.treeEnsemble
self.assertIsNotNone(tr)
def test_conversion_from_file(self):
import numpy as np
output_name = 'target'
feature_names = self.scikit_data.feature_names
xgb_model_json = tempfile.mktemp('xgb_tree_model_classifier.json')
xgb_json_out = self.xgb_model.get_dump(with_stats=True, dump_format='json')
with open(xgb_model_json, 'w') as f:
json.dump(xgb_json_out, f)
spec = xgb_converter.convert(
xgb_model_json,
feature_names,
output_name,
mode="classifier",
n_classes=self.n_classes,
).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertIsNotNone(spec.treeEnsembleRegressor)
# Test the interface class
self.assertEqual(spec.description.predictedFeatureName,
output_name)
# Test the inputs and outputs
self.assertEqual(len(spec.description.output), 2)
self.assertEqual(spec.description.output[0].name, output_name)
self.assertEqual(spec.description.output[0].type.WhichOneof('Type'), 'int64Type')
for input_type in spec.description.input:
self.assertEqual(input_type.type.WhichOneof('Type'),
'doubleType')
self.assertEqual(sorted(self.scikit_data.feature_names),
sorted(map(lambda x: x.name, spec.description.input)))
# Test the linear regression parameters.
tr = spec.treeEnsembleClassifier.treeEnsemble
self.assertIsNotNone(tr)
| 37.273016
| 108
| 0.669279
| 1,315
| 11,741
| 5.847148
| 0.134601
| 0.068279
| 0.037066
| 0.058525
| 0.849005
| 0.83704
| 0.821953
| 0.816751
| 0.805827
| 0.793081
| 0
| 0.005704
| 0.238481
| 11,741
| 314
| 109
| 37.39172
| 0.854267
| 0.128268
| 0
| 0.766497
| 0
| 0
| 0.052527
| 0.002984
| 0
| 0
| 0
| 0
| 0.28934
| 1
| 0.060914
| false
| 0
| 0.101523
| 0
| 0.182741
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
48fa7d6276dea425f7ac573870e2a230c2acac92
| 50,641
|
py
|
Python
|
Load_model.py
|
skkuej/StableMedicalGAN
|
adfe9e217944ffff9e807f68fc9dbd980746a678
|
[
"MIT"
] | null | null | null |
Load_model.py
|
skkuej/StableMedicalGAN
|
adfe9e217944ffff9e807f68fc9dbd980746a678
|
[
"MIT"
] | null | null | null |
Load_model.py
|
skkuej/StableMedicalGAN
|
adfe9e217944ffff9e807f68fc9dbd980746a678
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# In[1]:
def tumor_resunet():
from tensorflow.keras.backend import int_shape
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Conv2D, Conv3D, MaxPooling2D, MaxPooling3D, UpSampling2D, UpSampling3D, Add, BatchNormalization, Input, Activation, Lambda, Concatenate
import tensorflow as tf
from tensorflow import keras
import os
import time
import datetime
from matplotlib import pyplot as plt
from IPython import display
import numpy as np
import pandas as pd
def res_unet(filter_root, depth, input_size=(128, 128, 1), activation='relu', batch_norm=True, final_activation='tanh'):
inputs = Input(input_size)
x = inputs
# Dictionary for long connections
long_connection_store = {}
Conv = Conv2D
MaxPooling = MaxPooling2D
UpSampling = UpSampling2D
# Down sampling
for i in range(depth):
out_channel = 2**i * filter_root
res = Conv(out_channel, kernel_size=1, padding='same', use_bias=False, name="Identity{}_1".format(i))(x)
conv1 = Conv(out_channel, kernel_size=3, padding='same', name="dConv{}_1".format(i))(x)
if batch_norm:
conv1 = BatchNormalization(name="BN{}_1".format(i))(conv1)
act1 = Activation(activation, name="Act{}_1".format(i))(conv1)
conv2 = Conv(out_channel, kernel_size=3, padding='same', name="ddConv{}_2".format(i))(act1)
if batch_norm:
conv2 = BatchNormalization(name="BN{}_2".format(i))(conv2)
resconnection = Add(name="Add{}_1".format(i))([res, conv2])
act2 = Activation('selu', name="Act{}_2".format(i))(resconnection)
if i < depth - 1:
long_connection_store[str(i)] = act2
x = downsample(out_channel,3)(act2)
else:
x = act2
# Upsampling
for i in range(depth - 2, -1, -1):
out_channel = 2**(i) * filter_root
long_connection = long_connection_store[str(i)]
# up1 = UpSampling(name="UpSampling{}_1".format(i))(x)
up1 = upsample(out_channel,3)(x)
up_conv1 = Conv(out_channel, 2, activation='relu', padding='same', name="upConv{}_1".format(i))(up1)
up_conc = Concatenate(axis=-1, name="upConcatenate{}_1".format(i))([up_conv1, long_connection])
up_conv2 = Conv(out_channel, 3, padding='same', name="uupConv{}_1".format(i))(up_conc)
if batch_norm:
up_conv2 = BatchNormalization(name="upBN{}_1".format(i))(up_conv2)
up_act1 = Activation(activation, name="upAct{}_1".format(i))(up_conv2)
up_conv2 = Conv(out_channel, 3, padding='same', name="upConv{}_2".format(i))(up_act1)
if batch_norm:
up_conv2 = BatchNormalization(name="upBN{}_2".format(i))(up_conv2)
res = Conv(out_channel, kernel_size=1, padding='same', use_bias=False, name="upIdentity{}_1".format(i))(up_conc)
resconnection = Add(name="upAdd{}_1".format(i))([res, up_conv2])
x = Activation(activation, name="upAct{}_2".format(i))(resconnection)
if i == 2: x = attention(x, x.shape[3], size=1)
# Final convolution
output = Conv(1, 1, padding='same', activation=final_activation, name='output')(x)
return Model(inputs, outputs=output, name='Res-UNet')
OUTPUT_CHANNELS = 1
def downsample(filters, size, apply_batchnorm=True):
initializer = tf.keras.initializers.he_normal()
result = tf.keras.Sequential()
result.add(
SpectralNormalization(tf.keras.layers.Conv2D(filters, size, strides=2, padding='same',
kernel_initializer=initializer, use_bias=False)))
if apply_batchnorm:
result.add(tf.keras.layers.BatchNormalization())
result.add(tf.keras.layers.LeakyReLU())
return result
def upsample(filters, size, apply_dropout=False):
initializer = tf.keras.initializers.he_normal()
result = tf.keras.Sequential()
result.add(
SpectralNormalization(tf.keras.layers.Conv2DTranspose(filters, size, strides=2,
padding='same', kernel_initializer=initializer, use_bias=False)))
result.add(tf.keras.layers.BatchNormalization())
if apply_dropout: result.add(tf.keras.layers.Dropout(0.5))
result.add(tf.keras.layers.ReLU())
return result
class SpectralNormalization(tf.keras.layers.Wrapper):
def __init__(self, layer, iteration=1, eps=1e-12, training=True, **kwargs):
self.iteration = iteration
self.eps = eps
self.do_power_iteration = training
if not isinstance(layer, tf.keras.layers.Layer):
raise ValueError(
'Please initialize `TimeDistributed` layer with a '
'`Layer` instance. You passed: {input}'.format(input=layer))
super(SpectralNormalization, self).__init__(layer, **kwargs)
def build(self, input_shape):
self.layer.build(input_shape)
self.w = self.layer.kernel
self.w_shape = self.w.shape.as_list()
self.v = self.add_weight(shape=(1, self.w_shape[0] * self.w_shape[1] * self.w_shape[2]),
initializer=tf.initializers.TruncatedNormal(stddev=0.02),
trainable=False,
name='sn_v',
dtype=tf.float32)
self.u = self.add_weight(shape=(1, self.w_shape[-1]),
initializer=tf.initializers.TruncatedNormal(stddev=0.02),
trainable=False,
name='sn_u',
dtype=tf.float32)
super(SpectralNormalization, self).build()
def call(self, inputs):
self.update_weights()
output = self.layer(inputs)
self.restore_weights() # Restore weights because of this formula "W = W - alpha * W_SN`"
return output
def update_weights(self):
w_reshaped = tf.reshape(self.w, [-1, self.w_shape[-1]])
u_hat = self.u
v_hat = self.v # init v vector
if self.do_power_iteration:
for _ in range(self.iteration):
v_ = tf.matmul(u_hat, tf.transpose(w_reshaped))
v_hat = v_ / (tf.reduce_sum(v_**2)**0.5 + self.eps)
u_ = tf.matmul(v_hat, w_reshaped)
u_hat = u_ / (tf.reduce_sum(u_**2)**0.5 + self.eps)
sigma = tf.matmul(tf.matmul(v_hat, w_reshaped), tf.transpose(u_hat))
self.u.assign(u_hat)
self.v.assign(v_hat)
self.layer.kernel.assign(self.w / sigma)
def restore_weights(self):
self.layer.kernel.assign(self.w)
def hw_flatten(i) :
sh_list = tf.shape(i)
return tf.reshape(i, shape=[sh_list[0],sh_list[1]*sh_list[2],sh_list[3]])
def attention(x, channels, size):
initializer = tf.initializers.GlorotNormal()
f = tf.keras.layers.Conv2D(channels // 8, size, strides=1, padding='same',
kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c//8] 8
g = tf.keras.layers.Conv2D(channels // 8, size, strides=1, padding='same',
kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c//8] 8
h = tf.keras.layers.Conv2D(channels, size, strides=1, padding='same',
kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c//2] 32
# N = h * w
s = tf.matmul(hw_flatten(g), hw_flatten(f), transpose_b=True) # [bs, N, N]
beta = tf.nn.softmax(s) # attention map
o = tf.matmul(beta, hw_flatten(h)) # [bs, N, C]
gamma = tf.Variable(initial_value=[1.])#, initial_value=tf.constant(0.0))
o = tf.reshape(o, shape = tf.shape(x)) # [bs, h, w, C]
o = tf.keras.layers.Conv2D(channels, kernel_size=1, strides=1)(o)
x = gamma * o + x
return x
def Discriminator_RES():
initializer = tf.initializers.GlorotNormal()
inp = tf.keras.layers.Input(shape=[128,128,1], name='input_image', dtype="float32")
tar = tf.keras.layers.Input(shape=[128,128,1], name='target_image', dtype="float32")
x = tf.keras.layers.concatenate([inp, tar])
down1 = downsample(64, 4, False)(x) # (bs, 256, 256, 2) -> (bs, 128, 128, 64)
down2 = downsample(128, 4)(down1) # (bs, 128, 128, 64) -> (bs, 64,64, 128)
# att1 = attention(down3, down3.shape[3], size=1)
zero_pad1 = tf.keras.layers.ZeroPadding2D()(down2) # (bs, 34, 34, 256)
conv = tf.keras.layers.Conv2D(256, 4, strides=1,
kernel_initializer=initializer,use_bias=False)(zero_pad1) # (bs, 34-4+1=31, 31, 256)
batchnorm1 = tf.keras.layers.BatchNormalization()(conv)
leaky_relu = tf.keras.layers.LeakyReLU()(batchnorm1)
att2 = attention(leaky_relu, leaky_relu.shape[3], size=1)
zero_pad2 = tf.keras.layers.ZeroPadding2D()(att2) # (bs, 33, 33, 256)
last = tf.keras.layers.Conv2D(1, 4, strides=1,
kernel_initializer=initializer)(zero_pad2) # (bs, 33-4+1=30, 30, 1)
return tf.keras.Model(inputs=[inp, tar], outputs=last)
LAMBDA = 100
loss_object = tf.keras.losses.Hinge()
generator_optimizer = tf.keras.optimizers.Adam(1e-4, beta_1=0.5)
discriminator_optimizer = tf.keras.optimizers.Adam(4e-4, beta_1=0.5)
def generator_loss(disc_generated_output, gen_output, target):
roi_tar = tf.image.central_crop(target, central_fraction=0.3)
roi_gen = tf.image.central_crop(gen_output,central_fraction=0.3)
roi_loss = (LAMBDA*1.2) *tf.reduce_mean(tf.abs(roi_tar-roi_gen))
gan_loss = loss_object(tf.ones_like(disc_generated_output), disc_generated_output)
l1_loss = LAMBDA * tf.reduce_mean(tf.abs(target - gen_output))
total_gen_loss = gan_loss + l1_loss + roi_loss
return total_gen_loss, gan_loss, l1_loss, roi_loss
def discriminator_loss(disc_real_output, disc_generated_output):
real_loss = loss_object(tf.ones_like(disc_real_output), disc_real_output)
generated_loss = loss_object(tf.zeros_like(disc_generated_output), disc_generated_output)
total_disc_loss = real_loss + generated_loss
return total_disc_loss, generated_loss, real_loss
def ssim_function(x,y):
C1 = np.square(0.01*2)
C2 = np.square(0.03*2)
mean_x = np.mean(x)
mean_y = np.mean(y)
std_x = np.std(x)
std_y = np.std(y)
cov_xy = np.cov((y.numpy().flatten(),x.numpy().flatten())) # covariance 2x2 matrix
numerator = (2*mean_x*mean_y +C1 )*(2*np.mean(cov_xy) + C2)
denominator = (np.square(mean_x)+np.square(mean_y)+C1)*(np.square(std_x)+np.square(std_y)+C2)
return numerator/denominator
res_generator = res_unet(64,4)
res_generator.load_weights(r'F:\gan\model\ori\breast_res_model\g471_20200820-054106')
res_discriminator = Discriminator_RES()
res_discriminator.load_weights(r'F:\gan\model\ori\breast_res_model\d471_20200820-054110')
return res_generator, res_discriminator
# In[2]:
def tumor_sagan():
import tensorflow as tf
from tensorflow import keras
import os
import time
import datetime
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
# SAGAN - BOX loss
class SpectralNormalization(tf.keras.layers.Wrapper):
def __init__(self, layer, iteration=1, eps=1e-12, training=True, **kwargs):
self.iteration = iteration
self.eps = eps
self.do_power_iteration = training
if not isinstance(layer, tf.keras.layers.Layer):
raise ValueError(
'Please initialize `TimeDistributed` layer with a '
'`Layer` instance. You passed: {input}'.format(input=layer))
super(SpectralNormalization, self).__init__(layer, **kwargs)
def build(self, input_shape):
self.layer.build(input_shape)
self.w = self.layer.kernel
self.w_shape = self.w.shape.as_list()
self.v = self.add_weight(shape=(1, self.w_shape[0] * self.w_shape[1] * self.w_shape[2]),
initializer=tf.initializers.TruncatedNormal(stddev=0.02),
trainable=False,
name='sn_v',
dtype=tf.float32)
self.u = self.add_weight(shape=(1, self.w_shape[-1]),
initializer=tf.initializers.TruncatedNormal(stddev=0.02),
trainable=False,
name='sn_u',
dtype=tf.float32)
super(SpectralNormalization, self).build()
def call(self, inputs):
self.update_weights()
output = self.layer(inputs)
self.restore_weights() # Restore weights because of this formula "W = W - alpha * W_SN`"
return output
def update_weights(self):
w_reshaped = tf.reshape(self.w, [-1, self.w_shape[-1]])
u_hat = self.u
v_hat = self.v # init v vector
if self.do_power_iteration:
for _ in range(self.iteration):
v_ = tf.matmul(u_hat, tf.transpose(w_reshaped))
v_hat = v_ / (tf.reduce_sum(v_**2)**0.5 + self.eps)
u_ = tf.matmul(v_hat, w_reshaped)
u_hat = u_ / (tf.reduce_sum(u_**2)**0.5 + self.eps)
sigma = tf.matmul(tf.matmul(v_hat, w_reshaped), tf.transpose(u_hat))
self.u.assign(u_hat)
self.v.assign(v_hat)
self.layer.kernel.assign(self.w / sigma)
def restore_weights(self):
self.layer.kernel.assign(self.w)
OUTPUT_CHANNELS = 1
def downsample(filters, size, apply_batchnorm=True, apply_residual=True):
initializer = tf.keras.initializers.he_normal()
result = tf.keras.Sequential()
result.add(
SpectralNormalization(tf.keras.layers.Conv2D(filters, size, strides=2, padding='same',
kernel_initializer=initializer, use_bias=False)))
if apply_batchnorm:
result.add(tf.keras.layers.BatchNormalization())
result.add(tf.keras.layers.LeakyReLU())
return result
def upsample(filters, size, apply_dropout=False):
initializer = tf.keras.initializers.he_normal()
result = tf.keras.Sequential()
result.add(
SpectralNormalization(tf.keras.layers.Conv2DTranspose(filters, size, strides=2,
padding='same', kernel_initializer=initializer, use_bias=False)))
result.add(tf.keras.layers.BatchNormalization())
if apply_dropout:
result.add(tf.keras.layers.Dropout(0.5))
result.add(tf.keras.layers.ReLU())
return result
def hw_flatten(i) :
sh_list = tf.shape(i)
return tf.reshape(i, shape=[sh_list[0],sh_list[1]*sh_list[2],sh_list[3]])
def attention(x, channels, size):
initializer = tf.initializers.GlorotNormal()
f = tf.keras.layers.Conv2D(channels // 8, size, strides=1, padding='same',
kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c//8] 8
g = tf.keras.layers.Conv2D(channels // 8, size, strides=1, padding='same',
kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c//8] 8
h = tf.keras.layers.Conv2D(channels, size, strides=1, padding='same',
kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c//2] 32
# N = h * w
s = tf.matmul(hw_flatten(g), hw_flatten(f), transpose_b=True) # [bs, N, N]
beta = tf.nn.softmax(s) # attention map
o = tf.matmul(beta, hw_flatten(h)) # [bs, N, C]
gamma = tf.Variable(initial_value=[1.])#, initial_value=tf.constant(0.0))
o = tf.reshape(o, shape = tf.shape(x)) # [bs, h, w, C]
o = tf.keras.layers.Conv2D(channels, kernel_size=1, strides=1)(o)
x = gamma * o + x
return x
def Generator_SA():
inputs = tf.keras.layers.Input(shape=[128,128,1], dtype="float32")
down_stack = [
downsample(32, 4, apply_batchnorm=False), # (bs, 64, 64, 32)
downsample(64, 4), # (bs, 32, 32, 64)
downsample(128, 4), # (bs, 16, 16, 128)
downsample(256, 4), # (bs, 8, 8, 256)
downsample(256, 4), # (bs, 4, 4, 256)
downsample(256, 4), # (bs, 2, 2, 256)
downsample(256, 4), # (bs, 1, 1, 256)
]
up_stack = [
upsample(256, 4, apply_dropout=True), # (bs, 4, 4, 256)
upsample(256, 4, apply_dropout=True), # (bs, 8, 8, 256)
upsample(256, 4), # (bs, 16, 16, 256)
upsample(128, 4), # (bs, 32, 32, 128)
upsample(64, 4), # (bs, 64, 64, 64)
# self-attention layer
upsample(32, 4), # (bs,128, 128, 32)
# self-attention layer
]
# initializer = tf.random_normal_initializer(0., 0.02)
initializer = tf.initializers.GlorotNormal()
# initializer = tf.keras.initializers.he_normal()
last = tf.keras.layers.Conv2DTranspose(OUTPUT_CHANNELS, 4,
strides=2,
padding='same',
kernel_initializer=initializer, activation='tanh') # (bs, 256,256,1)
x = inputs
# Downsampling through the model
skips = []
for down in down_stack:
x = down(x)
skips.append(x)
skips = reversed(skips[:-1])
cnt = 1
# Upsampling and establishing the skip connections
for up, skip in zip(up_stack, skips):
x = up(x)
x = tf.keras.layers.Concatenate()([x,skip])
if x.shape[3] == 128 :#or x.shape[3] == 32
x_pre = x
x = attention(x, x.shape[3], size=1)
x = last(x)
x = tf.cast(x, tf.float32)
return tf.keras.Model(inputs=inputs, outputs=x)
def Discriminator_SA():
initializer = tf.initializers.GlorotNormal()
inp = tf.keras.layers.Input(shape=[128,128,1], name='input_image', dtype="float32")
tar = tf.keras.layers.Input(shape=[128,128,1], name='target_image', dtype="float32")
x = tf.keras.layers.concatenate([inp, tar])
down1 = downsample(64, 4, False)(x) # (bs, 256, 256, 2) -> (bs, 128, 128, 64)
down2 = downsample(128, 4)(down1) # (bs, 128, 128, 64) -> (bs, 64,64, 128)
# att1 = attention(down3, down3.shape[3], size=1)
zero_pad1 = tf.keras.layers.ZeroPadding2D()(down2) # (bs, 34, 34, 256)
conv = tf.keras.layers.Conv2D(256, 4, strides=1,
kernel_initializer=initializer,use_bias=False)(zero_pad1) # (bs, 34-4+1=31, 31, 256)
batchnorm1 = tf.keras.layers.BatchNormalization()(conv)
leaky_relu = tf.keras.layers.LeakyReLU()(batchnorm1)
att2 = attention(leaky_relu, leaky_relu.shape[3], size=1)
zero_pad2 = tf.keras.layers.ZeroPadding2D()(att2) # (bs, 33, 33, 256)
last = tf.keras.layers.Conv2D(1, 4, strides=1,
kernel_initializer=initializer)(zero_pad2) # (bs, 33-4+1=30, 30, 1)
return tf.keras.Model(inputs=[inp, tar], outputs=last)
generator_sa = Generator_SA()
discriminator_sa = Discriminator_SA()
generator_sa.load_weights(r'F:\gan\model\ori\breast_loss_model\g510_20200816-013729')
discriminator_sa.load_weights(r'F:\gan\model\ori\breast_loss_model\d510_20200816-013730')
return generator_sa, discriminator_sa
# In[3]:
def tumor_origin():
import tensorflow as tf
from tensorflow import keras
import os
import time
import datetime
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
OUTPUT_CHANNELS = 1
def downsample(filters, size, apply_batchnorm=True):
initializer = tf.random_normal_initializer(0., 0.02)
result = tf.keras.Sequential()
result.add(
tf.keras.layers.Conv2D(filters, size, strides=2, padding='same',
kernel_initializer=initializer, use_bias=False))
if apply_batchnorm:
result.add(tf.keras.layers.BatchNormalization())
result.add(tf.keras.layers.LeakyReLU())
return result
def upsample(filters, size, apply_dropout=False):
initializer = tf.random_normal_initializer(0., 0.02)
result = tf.keras.Sequential()
result.add(
tf.keras.layers.Conv2DTranspose(filters, size, strides=2,
padding='same',
kernel_initializer=initializer,
use_bias=False))
result.add(tf.keras.layers.BatchNormalization())
if apply_dropout:
result.add(tf.keras.layers.Dropout(0.5))
result.add(tf.keras.layers.ReLU())
return result
def Generator():
inputs = tf.keras.layers.Input(shape=[128,128,1], dtype="float32")
down_stack = [
downsample(32, 4, apply_batchnorm=False), # (bs, 64, 64, 32)
downsample(64, 4), # (bs, 32, 32, 64)
downsample(128, 4), # (bs, 16, 16, 128)
downsample(256, 4), # (bs, 8, 8, 256)
downsample(256, 4), # (bs, 4, 4, 256)
downsample(256, 4), # (bs, 2, 2, 256)
downsample(256, 4) # (bs, 1, 1, 256)
]
up_stack = [
upsample(256, 4, apply_dropout=True), # (bs, 4, 4, 256)
upsample(256, 4, apply_dropout=True), # (bs, 8, 8, 256)
upsample(256, 4), # (bs, 16, 16, 256)
upsample(128, 4), # (bs, 32, 32, 128)
upsample(64, 4), # (bs, 64, 64, 64)
upsample(32, 4)
]
initializer = tf.random_normal_initializer(0., 0.02)
last = tf.keras.layers.Conv2DTranspose(OUTPUT_CHANNELS, 4,
strides=2,
padding='same',
kernel_initializer=initializer, activation='tanh') # (bs, 128,128,1)
x = inputs
# Downsampling through the model
skips = []
for down in down_stack:
x = down(x)
skips.append(x)
skips = reversed(skips[:-1])
# Upsampling and establishing the skip connections
for up, skip in zip(up_stack, skips):
x = up(x)
x = tf.keras.layers.Concatenate()([x, skip])
x = last(x)
x = tf.cast(x, tf.float32)
return tf.keras.Model(inputs=inputs, outputs=x)
def Discriminator():
initializer = tf.random_normal_initializer(0., 0.02)
inp = tf.keras.layers.Input(shape=[128,128,1], name='input_image', dtype="float32")
tar = tf.keras.layers.Input(shape=[128,128,1], name='target_image', dtype="float32")
x = tf.keras.layers.concatenate([inp, tar]) # (bs, 128,128,channel*2=2)
down1 = downsample(64, 4, False)(x) # (bs, 64,64, 64) batchnorm = False ??
down2 = downsample(128, 4)(down1) # (bs, 32, 32, 128)
zero_pad1 = tf.keras.layers.ZeroPadding2D()(down2) # (bs, 34, 34, 256)
conv = tf.keras.layers.Conv2D(256, 4, strides=1,
kernel_initializer=initializer,
use_bias=False)(zero_pad1) # (bs, 34-4+1=31, 31, 256)
batchnorm1 = tf.keras.layers.BatchNormalization()(conv)
leaky_relu = tf.keras.layers.LeakyReLU()(batchnorm1)
zero_pad2 = tf.keras.layers.ZeroPadding2D()(leaky_relu) # (bs, 33, 33, 256)
last = tf.keras.layers.Conv2D(1, 4, strides=1,
kernel_initializer=initializer)(zero_pad2) # (bs, 33-4+1=30, 30, 1)
return tf.keras.Model(inputs=[inp, tar], outputs=last)
generator = Generator()
discriminator = Discriminator()
LAMBDA = 100
loss_object = tf.keras.losses.BinaryCrossentropy(from_logits=True)
generator_optimizer = tf.keras.optimizers.Adam(2e-4, beta_1=0.5)
discriminator_optimizer = tf.keras.optimizers.Adam(2e-4, beta_1=0.5) # default beta 1 : 0.9 (momentum wieght)
generator = Generator()
generator.load_weights(r'F:\gan\model\ori\breast_ori_model\g200_20200814-154426')
discriminator = Discriminator()
discriminator.load_weights(r'F:\gan\model\ori\breast_ori_model\d200_20200814-154426')
return generator, discriminator
def mv_resunet():
import tensorflow as tf
from tensorflow import keras
import os
import time
import datetime
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
################################# MV-RES U-net #########################################
from tensorflow.keras.backend import int_shape
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Conv2D, Conv3D, MaxPooling2D, MaxPooling3D, UpSampling2D, UpSampling3D, Add, BatchNormalization, Input, Activation, Lambda, Concatenate
def res_unet(filter_root, depth, input_size=(100, 100, 1), activation='relu', batch_norm=True, final_activation='tanh'):
    """Build a 2D residual U-Net.

    Args:
        filter_root: filters at the first level; level i uses 2**i * filter_root.
        depth: number of encoder levels (depth-1 down/up transitions).
        input_size: model input shape, e.g. (100, 100, 1).
        activation: activation used in the conv blocks.
        batch_norm: insert BatchNormalization after each 3x3 conv when True.
        final_activation: activation of the final 1x1 output convolution.

    Returns:
        A tf.keras Model named 'Res-UNet' producing a single-channel map.
    """
    inputs = Input(input_size)
    x = inputs
    # Dictionary for long (skip) connections, keyed by encoder level
    long_connection_store = {}
    # 2D aliases (a 3D variant would swap these for the Conv3D family)
    Conv = Conv2D
    MaxPooling = MaxPooling2D    # NOTE(review): unused below — pooling is done by downsample()
    UpSampling = UpSampling2D    # NOTE(review): unused below — upsampling is done by upsample()
    # Down sampling
    for i in range(depth):
        out_channel = 2**i * filter_root
        # 1x1 projection so the residual add matches this block's channel count
        res = Conv(out_channel, kernel_size=1, padding='same', use_bias=False, name="Identity{}_1".format(i))(x)
        conv1 = Conv(out_channel, kernel_size=3, padding='same', name="dConv{}_1".format(i))(x)
        if batch_norm:
            conv1 = BatchNormalization(name="BN{}_1".format(i))(conv1)
        act1 = Activation(activation, name="Act{}_1".format(i))(conv1)
        conv2 = Conv(out_channel, kernel_size=3, padding='same', name="ddConv{}_2".format(i))(act1)
        if batch_norm:
            conv2 = BatchNormalization(name="BN{}_2".format(i))(conv2)
        resconnection = Add(name="Add{}_1".format(i))([res, conv2])
        act2 = Activation('selu', name="Act{}_2".format(i))(resconnection)
        if i < depth - 1:
            # Keep the pre-pooling activation for the decoder skip, then
            # halve the spatial size with the strided-conv downsample block.
            long_connection_store[str(i)] = act2
            x = downsample(out_channel,3)(act2)
        else:
            x = act2
    # Upsampling
    for i in range(depth - 2, -1, -1):
        out_channel = 2**(i) * filter_root
        long_connection = long_connection_store[str(i)]
        # up1 = UpSampling(name="UpSampling{}_1".format(i))(x)
        up1 = upsample(out_channel,3)(x)
        up_conv1 = Conv(out_channel, 2, activation='relu', padding='same', name="upConv{}_1".format(i))(up1)
        # Stride-2 transposed conv doubles odd encoder sizes to even ones
        # (e.g. 7 -> 14); crop one row/column so the shape matches the stored
        # skip tensor (13, 25, ...) before concatenating.
        if up_conv1.shape[1]==8 or up_conv1.shape[1]==14 or up_conv1.shape[1]==26:
            up_conv1 = up_conv1[:,:up_conv1.shape[1]-1,:up_conv1.shape[1]-1,:]
        up_conc = Concatenate(axis=-1, name="upConcatenate{}_1".format(i))([up_conv1, long_connection])
        up_conv2 = Conv(out_channel, 3, padding='same', name="uupConv{}_1".format(i))(up_conc)
        if batch_norm:
            up_conv2 = BatchNormalization(name="upBN{}_1".format(i))(up_conv2)
        up_act1 = Activation(activation, name="upAct{}_1".format(i))(up_conv2)
        up_conv2 = Conv(out_channel, 3, padding='same', name="upConv{}_2".format(i))(up_act1)
        if batch_norm:
            up_conv2 = BatchNormalization(name="upBN{}_2".format(i))(up_conv2)
        res = Conv(out_channel, kernel_size=1, padding='same', use_bias=False, name="upIdentity{}_1".format(i))(up_conc)
        resconnection = Add(name="upAdd{}_1".format(i))([res, up_conv2])
        x = Activation(activation, name="upAct{}_2".format(i))(resconnection)
        # Self-attention applied on decoder level 2 only
        if i == 2: x = attention(x, x.shape[3], size=1)
    # Final convolution
    output = Conv(1, 1, padding='same', activation=final_activation, name='output')(x)
    return Model(inputs, outputs=output, name='Res-UNet')
OUTPUT_CHANNELS = 1
def downsample(filters, size, apply_batchnorm=True):
    """Downsampling block: spectrally-normalized Conv2D (stride 2) [+ BN] + LeakyReLU."""
    kernel_init = tf.keras.initializers.he_normal()
    conv = tf.keras.layers.Conv2D(filters, size, strides=2, padding='same',
                                  kernel_initializer=kernel_init, use_bias=False)
    block = tf.keras.Sequential()
    block.add(SpectralNormalization(conv))
    if apply_batchnorm:
        block.add(tf.keras.layers.BatchNormalization())
    block.add(tf.keras.layers.LeakyReLU())
    return block
def upsample(filters, size, apply_dropout=False):
    """Upsampling block: spectrally-normalized Conv2DTranspose (stride 2) + BN [+ Dropout] + ReLU."""
    kernel_init = tf.keras.initializers.he_normal()
    deconv = tf.keras.layers.Conv2DTranspose(filters, size, strides=2, padding='same',
                                             kernel_initializer=kernel_init, use_bias=False)
    block = tf.keras.Sequential()
    block.add(SpectralNormalization(deconv))
    block.add(tf.keras.layers.BatchNormalization())
    if apply_dropout:
        block.add(tf.keras.layers.Dropout(0.5))
    block.add(tf.keras.layers.ReLU())
    return block
class SpectralNormalization(tf.keras.layers.Wrapper):
    """Applies spectral normalization to the kernel of the wrapped layer.

    On every call the kernel is divided by its largest singular value
    (estimated by power iteration on persistent u/v vectors), the wrapped
    layer runs, and the raw kernel is then restored so the optimizer
    updates the unnormalized weights.
    """

    def __init__(self, layer, iteration=1, eps=1e-12, training=True, **kwargs):
        """layer: a Keras layer exposing a `kernel` attribute.
        iteration: power-iteration steps per call.
        eps: numerical stabilizer added to the vector norms.
        training: when False, u/v are reused without further iteration."""
        self.iteration = iteration
        self.eps = eps
        self.do_power_iteration = training
        if not isinstance(layer, tf.keras.layers.Layer):
            # Fix: the message previously named `TimeDistributed` (copied from
            # another wrapper) and misled callers about which class raised.
            raise ValueError(
                'Please initialize `SpectralNormalization` layer with a '
                '`Layer` instance. You passed: {input}'.format(input=layer))
        super(SpectralNormalization, self).__init__(layer, **kwargs)

    def build(self, input_shape):
        self.layer.build(input_shape)
        self.w = self.layer.kernel
        self.w_shape = self.w.shape.as_list()
        # u/v approximate the dominant singular vectors of the kernel viewed
        # as a (prod(spatial * in_channels), out_channels) matrix; they are
        # state, not trainable parameters.
        self.v = self.add_weight(shape=(1, self.w_shape[0] * self.w_shape[1] * self.w_shape[2]),
                                 initializer=tf.initializers.TruncatedNormal(stddev=0.02),
                                 trainable=False,
                                 name='sn_v',
                                 dtype=tf.float32)
        self.u = self.add_weight(shape=(1, self.w_shape[-1]),
                                 initializer=tf.initializers.TruncatedNormal(stddev=0.02),
                                 trainable=False,
                                 name='sn_u',
                                 dtype=tf.float32)
        super(SpectralNormalization, self).build()

    def call(self, inputs):
        self.update_weights()
        output = self.layer(inputs)
        self.restore_weights()  # Restore weights because of this formula "W = W - alpha * W_SN`"
        return output

    def update_weights(self):
        """Run power iteration, then rescale the wrapped layer's kernel by 1/sigma."""
        w_reshaped = tf.reshape(self.w, [-1, self.w_shape[-1]])
        u_hat = self.u
        v_hat = self.v  # init v vector
        if self.do_power_iteration:
            for _ in range(self.iteration):
                v_ = tf.matmul(u_hat, tf.transpose(w_reshaped))
                v_hat = v_ / (tf.reduce_sum(v_**2)**0.5 + self.eps)
                u_ = tf.matmul(v_hat, w_reshaped)
                u_hat = u_ / (tf.reduce_sum(u_**2)**0.5 + self.eps)
        # sigma = v^T W u : the current spectral-norm estimate
        sigma = tf.matmul(tf.matmul(v_hat, w_reshaped), tf.transpose(u_hat))
        self.u.assign(u_hat)
        self.v.assign(v_hat)
        self.layer.kernel.assign(self.w / sigma)

    def restore_weights(self):
        """Undo the spectral scaling so gradients apply to the raw kernel."""
        self.layer.kernel.assign(self.w)
def hw_flatten(i):
    """Collapse the spatial dimensions: (bs, h, w, c) -> (bs, h*w, c)."""
    dims = tf.shape(i)
    return tf.reshape(i, shape=[dims[0], dims[1] * dims[2], dims[3]])
def attention(x, channels, size):
    """SAGAN-style self-attention over the spatial positions of x.

    f and g form the attention map, h is the value projection; the attended
    output is scaled by `gamma` and added back to the input (residual).
    Returns a tensor with the same shape as x.
    """
    initializer = tf.initializers.GlorotNormal()
    # Key/query projections at reduced channel width (c//8)
    f = tf.keras.layers.Conv2D(channels // 8, size, strides=1, padding='same',
    kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c//8]
    g = tf.keras.layers.Conv2D(channels // 8, size, strides=1, padding='same',
    kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c//8]
    # Value projection at full channel width
    h = tf.keras.layers.Conv2D(channels, size, strides=1, padding='same',
    kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c]
    # N = h * w
    s = tf.matmul(hw_flatten(g), hw_flatten(f), transpose_b=True) # [bs, N, N]
    beta = tf.nn.softmax(s) # attention map
    o = tf.matmul(beta, hw_flatten(h)) # [bs, N, C]
    # NOTE(review): gamma starts at 1.0 here, whereas the SAGAN paper
    # initializes it to 0; also, creating a tf.Variable inside a functional
    # graph ties it to trace time — confirm this is intended.
    gamma = tf.Variable(initial_value=[1.])#, initial_value=tf.constant(0.0))
    o = tf.reshape(o, shape = tf.shape(x)) # [bs, h, w, C]
    o = tf.keras.layers.Conv2D(channels, kernel_size=1, strides=1)(o)
    x = gamma * o + x
    return x
def Discriminator():
    """PatchGAN discriminator with a self-attention block, for 100x100 pairs.

    Concatenates input and target channel-wise and outputs a patch logit map.
    """
    init = tf.initializers.GlorotNormal()
    inp = tf.keras.layers.Input(shape=[100, 100, 1], name='input_image', dtype="float32")
    tar = tf.keras.layers.Input(shape=[100, 100, 1], name='target_image', dtype="float32")
    feat = tf.keras.layers.concatenate([inp, tar])
    feat = downsample(64, 4, False)(feat)   # no batchnorm on the first block
    feat = downsample(128, 4)(feat)
    feat = tf.keras.layers.ZeroPadding2D()(feat)
    feat = tf.keras.layers.Conv2D(256, 4, strides=1,
                                  kernel_initializer=init, use_bias=False)(feat)
    feat = tf.keras.layers.BatchNormalization()(feat)
    feat = tf.keras.layers.LeakyReLU()(feat)
    # Self-attention over the patch features before the final scoring conv
    feat = attention(feat, feat.shape[3], size=1)
    feat = tf.keras.layers.ZeroPadding2D()(feat)
    logits = tf.keras.layers.Conv2D(1, 4, strides=1,
                                    kernel_initializer=init)(feat)
    return tf.keras.Model(inputs=[inp, tar], outputs=logits)
# Training objects for the MV residual-U-Net model.
# NOTE(review): this discriminator instance is rebound further below before use.
discriminator = Discriminator()
LAMBDA = 100  # weight of the L1 term in generator_loss
loss_object = tf.keras.losses.Hinge()
# Two-timescale update rule: discriminator LR (4e-4) > generator LR (1e-4).
generator_optimizer = tf.keras.optimizers.Adam(1e-4, beta_1=0.5)
discriminator_optimizer = tf.keras.optimizers.Adam(4e-4, beta_1=0.5)
def generator_loss(disc_generated_output, gen_output, target):
    """Generator objective: adversarial + L1 + extra-weighted central-ROI L1.

    Returns (total_gen_loss, gan_loss, l1_loss, roi_loss).
    """
    # Adversarial term: generator wants the discriminator to score fakes as real.
    gan_loss = loss_object(tf.ones_like(disc_generated_output), disc_generated_output)
    # Global L1 reconstruction term.
    l1_loss = LAMBDA * tf.reduce_mean(tf.abs(target - gen_output))
    # The central 30% crop receives an additional, slightly stronger L1 penalty.
    crop_target = tf.image.central_crop(target, central_fraction=0.3)
    crop_generated = tf.image.central_crop(gen_output, central_fraction=0.3)
    roi_loss = (LAMBDA * 1.2) * tf.reduce_mean(tf.abs(crop_target - crop_generated))
    total_gen_loss = gan_loss + l1_loss + roi_loss
    return total_gen_loss, gan_loss, l1_loss, roi_loss
def discriminator_loss(disc_real_output, disc_generated_output):
    """Discriminator objective: reals scored toward one, fakes toward zero.

    Returns (total_disc_loss, generated_loss, real_loss).
    """
    real_loss = loss_object(tf.ones_like(disc_real_output), disc_real_output)
    generated_loss = loss_object(tf.zeros_like(disc_generated_output), disc_generated_output)
    return real_loss + generated_loss, generated_loss, real_loss
# Build the residual U-Net generator (64 root filters, depth 4) and the
# attention discriminator, then restore pretrained weights from disk.
res_generator = res_unet(64,4)
res_discriminator = Discriminator()
res_generator.load_weights(r'F:\gan\model\mv\breast_mv_res_model\g316_20200901-185218')
res_discriminator.load_weights(r'F:\gan\model\mv\breast_mv_res_model\d316_20200901-185219')
return res_generator, res_discriminator
def mv_sagan():
import tensorflow as tf
from tensorflow import keras
import os
import time
import datetime
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
################################### SAGAN BOX LOSS ##################################
OUTPUT_CHANNELS = 1
def downsample(filters, size, apply_batchnorm=True):
    """Downsampling block: spectrally-normalized Conv2D (stride 2) [+ BN] + LeakyReLU."""
    initializer = tf.keras.initializers.he_normal()
    result = tf.keras.Sequential()
    result.add(
        SpectralNormalization(tf.keras.layers.Conv2D(filters, size, strides=2, padding='same',
                              kernel_initializer=initializer, use_bias=False)))
    if apply_batchnorm:
        result.add(tf.keras.layers.BatchNormalization())
    result.add(tf.keras.layers.LeakyReLU())
    return result
def upsample(filters, size, apply_dropout=False):
    """Upsampling block: spectrally-normalized Conv2DTranspose (stride 2) + BN [+ Dropout] + ReLU."""
    initializer = tf.keras.initializers.he_normal()
    result = tf.keras.Sequential()
    result.add(
        SpectralNormalization(tf.keras.layers.Conv2DTranspose(filters, size, strides=2,
                              padding='same', kernel_initializer=initializer, use_bias=False)))
    result.add(tf.keras.layers.BatchNormalization())
    if apply_dropout: result.add(tf.keras.layers.Dropout(0.5))
    result.add(tf.keras.layers.ReLU())
    return result
class SpectralNormalization(tf.keras.layers.Wrapper):
    """Applies spectral normalization to the kernel of the wrapped layer.

    On every call the kernel is divided by its largest singular value
    (estimated by power iteration on persistent u/v vectors), the wrapped
    layer runs, and the raw kernel is then restored so the optimizer
    updates the unnormalized weights.
    """

    def __init__(self, layer, iteration=1, eps=1e-12, training=True, **kwargs):
        """layer: a Keras layer exposing a `kernel` attribute.
        iteration: power-iteration steps per call.
        eps: numerical stabilizer added to the vector norms.
        training: when False, u/v are reused without further iteration."""
        self.iteration = iteration
        self.eps = eps
        self.do_power_iteration = training
        if not isinstance(layer, tf.keras.layers.Layer):
            # Fix: the message previously named `TimeDistributed` (copied from
            # another wrapper) and misled callers about which class raised.
            raise ValueError(
                'Please initialize `SpectralNormalization` layer with a '
                '`Layer` instance. You passed: {input}'.format(input=layer))
        super(SpectralNormalization, self).__init__(layer, **kwargs)

    def build(self, input_shape):
        self.layer.build(input_shape)
        self.w = self.layer.kernel
        self.w_shape = self.w.shape.as_list()
        # u/v approximate the dominant singular vectors of the kernel viewed
        # as a (prod(spatial * in_channels), out_channels) matrix; they are
        # state, not trainable parameters.
        self.v = self.add_weight(shape=(1, self.w_shape[0] * self.w_shape[1] * self.w_shape[2]),
                                 initializer=tf.initializers.TruncatedNormal(stddev=0.02),
                                 trainable=False,
                                 name='sn_v',
                                 dtype=tf.float32)
        self.u = self.add_weight(shape=(1, self.w_shape[-1]),
                                 initializer=tf.initializers.TruncatedNormal(stddev=0.02),
                                 trainable=False,
                                 name='sn_u',
                                 dtype=tf.float32)
        super(SpectralNormalization, self).build()

    def call(self, inputs):
        self.update_weights()
        output = self.layer(inputs)
        self.restore_weights()  # Restore weights because of this formula "W = W - alpha * W_SN`"
        return output

    def update_weights(self):
        """Run power iteration, then rescale the wrapped layer's kernel by 1/sigma."""
        w_reshaped = tf.reshape(self.w, [-1, self.w_shape[-1]])
        u_hat = self.u
        v_hat = self.v  # init v vector
        if self.do_power_iteration:
            for _ in range(self.iteration):
                v_ = tf.matmul(u_hat, tf.transpose(w_reshaped))
                v_hat = v_ / (tf.reduce_sum(v_**2)**0.5 + self.eps)
                u_ = tf.matmul(v_hat, w_reshaped)
                u_hat = u_ / (tf.reduce_sum(u_**2)**0.5 + self.eps)
        # sigma = v^T W u : the current spectral-norm estimate
        sigma = tf.matmul(tf.matmul(v_hat, w_reshaped), tf.transpose(u_hat))
        self.u.assign(u_hat)
        self.v.assign(v_hat)
        self.layer.kernel.assign(self.w / sigma)

    def restore_weights(self):
        """Undo the spectral scaling so gradients apply to the raw kernel."""
        self.layer.kernel.assign(self.w)
def hw_flatten(i) :
    """Collapse the spatial dimensions: (bs, h, w, c) -> (bs, h*w, c)."""
    sh_list = tf.shape(i)
    return tf.reshape(i, shape=[sh_list[0],sh_list[1]*sh_list[2],sh_list[3]])
def attention(x, channels, size):
    """SAGAN-style self-attention over the spatial positions of x.

    f and g form the attention map, h is the value projection; the attended
    output is scaled by `gamma` and added back to the input (residual).
    Returns a tensor with the same shape as x.
    """
    initializer = tf.initializers.GlorotNormal()
    # Key/query projections at reduced channel width (c//8)
    f = tf.keras.layers.Conv2D(channels // 8, size, strides=1, padding='same',
    kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c//8]
    g = tf.keras.layers.Conv2D(channels // 8, size, strides=1, padding='same',
    kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c//8]
    # Value projection at full channel width
    h = tf.keras.layers.Conv2D(channels, size, strides=1, padding='same',
    kernel_initializer=initializer, use_bias=False)(x) # [bs, h, w, c]
    # N = h * w
    s = tf.matmul(hw_flatten(g), hw_flatten(f), transpose_b=True) # [bs, N, N]
    beta = tf.nn.softmax(s) # attention map
    o = tf.matmul(beta, hw_flatten(h)) # [bs, N, C]
    # NOTE(review): gamma starts at 1.0 here, whereas the SAGAN paper
    # initializes it to 0; also, creating a tf.Variable inside a functional
    # graph ties it to trace time — confirm this is intended.
    gamma = tf.Variable(initial_value=[1.])#, initial_value=tf.constant(0.0))
    o = tf.reshape(o, shape = tf.shape(x)) # [bs, h, w, C]
    o = tf.keras.layers.Conv2D(channels, kernel_size=1, strides=1)(o)
    x = gamma * o + x
    return x
def Generator():
    """U-Net generator (100x100x1 input) with self-attention in the decoder.

    Encoder/decoder use spectrally-normalized conv blocks; where the stride-2
    transposed convs overshoot the odd-sized encoder maps, the decoder tensor
    is cropped by one row/column before the skip concatenation.

    Returns:
        A tf.keras Model mapping the input image to a tanh-activated image.
    """
    inputs = tf.keras.layers.Input(shape=[100,100,1], dtype="float32")
    down_stack = [
        downsample(32, 4, apply_batchnorm=False), # (bs, 50, 50, 32)
        downsample(64, 4), # (bs, 25, 25, 64)
        downsample(128, 4), # (bs, 13, 13, 128)
        downsample(256, 4), # (bs, 7, 7, 256)
        downsample(256, 4), # (bs, 4, 4, 256)
        downsample(256, 4), # (bs, 2, 2, 256)
        downsample(256, 4), # (bs, 1, 1, 256)
    ]
    up_stack = [
        upsample(256, 4, apply_dropout=True), # (bs, 2, 2, 256)
        upsample(256, 4, apply_dropout=True), # (bs, 4, 4, 256)
        upsample(256, 4), # (bs, 8->7, 8->7, 256)
        upsample(128, 4), # (bs, 14->13, 14->13, 128)
        upsample(64, 4), # (bs, 26->25, 26->25, 64)
        upsample(32, 4), # (bs, 50, 50, 32)
    ]
    # initializer = tf.random_normal_initializer(0., 0.02)
    initializer = tf.initializers.GlorotNormal()
    # initializer = tf.keras.initializers.he_normal()
    last = tf.keras.layers.Conv2DTranspose(OUTPUT_CHANNELS, 4,
                                           strides=2,
                                           padding='same',
                                           kernel_initializer=initializer, activation='tanh')
    x = inputs
    # Downsampling through the model, remembering each level for the skips
    skips = []
    for down in down_stack:
        x = down(x)
        skips.append(x)
    skips = reversed(skips[:-1])
    # Upsampling and establishing the skip connections.
    # (Removed the unused `cnt` counter and `x_pre` snapshot from the
    # original implementation — neither was read anywhere.)
    for up, skip in zip(up_stack, skips):
        x = up(x)
        # Doubling an odd encoder size overshoots by one (7 -> 14 vs 13 etc.);
        # crop one row/column so the shapes match the stored skip tensor.
        if x.shape[1]==8 or x.shape[1]==14 or x.shape[1]==26:
            x = x[:,:x.shape[1]-1,:x.shape[1]-1,:]
        x = tf.keras.layers.Concatenate()([x,skip])
        # Self-attention once, on the 128-channel decoder level.
        if x.shape[3] == 128 :#or x.shape[3] == 32
            x = attention(x, x.shape[3], size=1)
    x = last(x)
    x = tf.cast(x, tf.float32)
    return tf.keras.Model(inputs=inputs, outputs=x)
def Discriminator():
    """PatchGAN discriminator with a self-attention block, for 100x100 pairs.

    Concatenates input and target channel-wise and outputs a patch logit map.
    """
    initializer = tf.initializers.GlorotNormal()
    inp = tf.keras.layers.Input(shape=[100,100,1], name='input_image', dtype="float32")
    tar = tf.keras.layers.Input(shape=[100,100,1], name='target_image', dtype="float32")
    x = tf.keras.layers.concatenate([inp, tar])
    down1 = downsample(64, 4, False)(x)  # no batchnorm on the first block
    down2 = downsample(128, 4)(down1)
    # att1 = attention(down3, down3.shape[3], size=1)
    zero_pad1 = tf.keras.layers.ZeroPadding2D()(down2)
    conv = tf.keras.layers.Conv2D(256, 4, strides=1,
    kernel_initializer=initializer,use_bias=False)(zero_pad1)
    batchnorm1 = tf.keras.layers.BatchNormalization()(conv)
    leaky_relu = tf.keras.layers.LeakyReLU()(batchnorm1)
    # Self-attention over the patch features before the final scoring conv
    att2 = attention(leaky_relu, leaky_relu.shape[3], size=1)
    zero_pad2 = tf.keras.layers.ZeroPadding2D()(att2)
    last = tf.keras.layers.Conv2D(1, 4, strides=1,
    kernel_initializer=initializer)(zero_pad2)
    return tf.keras.Model(inputs=[inp, tar], outputs=last)
# Training objects plus pretrained-weight loading for the SAGAN box-loss model.
LAMBDA = 100  # weight of the L1 term in the generator loss
loss_object = tf.keras.losses.Hinge()
# Two-timescale update rule: discriminator LR (4e-4) > generator LR (1e-4).
generator_optimizer = tf.keras.optimizers.Adam(1e-4, beta_1=0.5)
discriminator_optimizer = tf.keras.optimizers.Adam(4e-4, beta_1=0.5)
generator_sa = Generator()
discriminator_sa = Discriminator()
generator_sa.load_weights(r'F:\gan\model\mv\breast_mv_loss_model\g350_20200902-142131')
discriminator_sa.load_weights(r'F:\gan\model\mv\breast_mv_loss_model\d350_20200902-142133')
return generator_sa, discriminator_sa
def mv_origin():
import tensorflow as tf
from tensorflow import keras
import os
import time
import datetime
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
####################### MV - original NET ###############################
OUTPUT_CHANNELS = 1
def downsample(filters, size, apply_batchnorm=True):
    """Downsampling block: Conv2D (stride 2) [+ BN] + LeakyReLU."""
    init = tf.random_normal_initializer(0., 0.02)
    block = tf.keras.Sequential()
    block.add(tf.keras.layers.Conv2D(filters, size, strides=2, padding='same',
                                     kernel_initializer=init, use_bias=False))
    if apply_batchnorm:
        block.add(tf.keras.layers.BatchNormalization())
    block.add(tf.keras.layers.LeakyReLU())
    return block
def upsample(filters, size, apply_dropout=False):
    """Upsampling block: Conv2DTranspose (stride 2) + BN [+ Dropout] + ReLU."""
    init = tf.random_normal_initializer(0., 0.02)
    block = tf.keras.Sequential()
    block.add(tf.keras.layers.Conv2DTranspose(filters, size, strides=2, padding='same',
                                              kernel_initializer=init, use_bias=False))
    block.add(tf.keras.layers.BatchNormalization())
    if apply_dropout:
        block.add(tf.keras.layers.Dropout(0.5))
    block.add(tf.keras.layers.ReLU())
    return block
def Generator():
    """Plain pix2pix U-Net generator for 100x100x1 inputs.

    The decoder crops one row/column where a stride-2 transposed conv
    overshoots an odd-sized encoder feature map, so the skip concatenation
    shapes line up. Output is tanh-activated with OUTPUT_CHANNELS channels.
    """
    inputs = tf.keras.layers.Input(shape=[100,100,1], dtype="float32")
    down_stack = [
        downsample(32, 4, apply_batchnorm=False), # (bs, 50, 50, 32)
        downsample(64, 4), # (bs, 25, 25, 64)
        downsample(128, 4), # (bs, 13, 13, 128)
        downsample(256, 4), # (bs, 7, 7, 256)
        downsample(256, 4), # (bs, 4, 4, 256)
        downsample(256, 4), #(bs, 2, 2, 256)
        downsample(256, 4) #(bs, 1, 1, 256)
    ]
    up_stack = [
        upsample(256, 4, apply_dropout=True),
        upsample(256, 4, apply_dropout=True), # (bs, 2, 2, 256)
        upsample(256, 4), # (bs, 4, 4, 256)
        upsample(128, 4), # (bs, 7, 7, 128)
        upsample(64, 4), # (bs, 24, 24, 64)
        upsample(32, 4), # (bs, 25->50, 25->50, 32)
    ]
    initializer = tf.random_normal_initializer(0., 0.02)
    last = tf.keras.layers.Conv2DTranspose(OUTPUT_CHANNELS, 4,
        strides=2,
        padding='same',
        kernel_initializer=initializer, activation='tanh')
    x = inputs
    # Downsampling through the model, remembering each level for the skips
    skips = []
    for down in down_stack:
        x = down(x)
        skips.append(x)
        # print('downsampling',x.shape)
    skips = reversed(skips[:-1])
    # Upsampling and establishing the skip connections
    for up, skip in zip(up_stack, skips):
        x = up(x)
        # Doubling an odd encoder size overshoots by one (7 -> 14 vs 13 etc.);
        # crop one row/column so the shapes match the stored skip tensor.
        if x.shape[1]==8 or x.shape[1]==14 or x.shape[1]==26:
            x = x[:,:x.shape[1]-1,:x.shape[1]-1,:]
        # print('upsampling',x.shape)
        x = tf.keras.layers.Concatenate()([x, skip])
        # print(' after concat',x.shape)
    x = last(x)
    x = tf.cast(x, tf.float32)
    return tf.keras.Model(inputs=inputs, outputs=x)
def Discriminator():
    """PatchGAN discriminator for 100x100 single-channel image pairs.

    Concatenates input and target channel-wise and outputs a patch logit map.
    """
    init = tf.random_normal_initializer(0., 0.02)
    inp = tf.keras.layers.Input(shape=[100, 100, 1], name='input_image', dtype="float32")
    tar = tf.keras.layers.Input(shape=[100, 100, 1], name='target_image', dtype="float32")
    # (bs, 100, 100, 2) after channel concatenation
    feat = tf.keras.layers.concatenate([inp, tar])
    feat = downsample(64, 4, False)(feat)   # (bs, 50, 50, 64); no batchnorm on the first block
    feat = downsample(128, 4)(feat)          # (bs, 25, 25, 128)
    feat = tf.keras.layers.ZeroPadding2D()(feat)  # (bs, 27, 27, 128)
    feat = tf.keras.layers.Conv2D(256, 4, strides=1,
                                  kernel_initializer=init,
                                  use_bias=False)(feat)  # (bs, 27-4+1=24, 24, 256)
    feat = tf.keras.layers.BatchNormalization()(feat)
    feat = tf.keras.layers.LeakyReLU()(feat)
    feat = tf.keras.layers.ZeroPadding2D()(feat)  # (bs, 26, 26, 256)
    logits = tf.keras.layers.Conv2D(1, 4, strides=1,
                                    kernel_initializer=init)(feat)  # (bs, 23, 23, 1)
    return tf.keras.Model(inputs=[inp, tar], outputs=logits)
# Build the plain pix2pix networks and restore pretrained weights from disk.
generator = Generator()
discriminator = Discriminator()
generator.load_weights(r'F:\gan\model\mv\breast_xyz_model\g380_20200826-200239')
discriminator.load_weights(r'F:\gan\model\mv\breast_xyz_model\d380_20200826-200240')
return generator, discriminator
| 41.886683
| 175
| 0.57726
| 6,408
| 50,641
| 4.424313
| 0.056492
| 0.043455
| 0.060527
| 0.022786
| 0.958979
| 0.949914
| 0.948221
| 0.944446
| 0.938979
| 0.929209
| 0
| 0.057907
| 0.288995
| 50,641
| 1,209
| 176
| 41.886683
| 0.72949
| 0.082779
| 0
| 0.934463
| 0
| 0
| 0.040674
| 0.014281
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071186
| false
| 0.00452
| 0.062147
| 0
| 0.19096
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
48ffe94fb42e38e617266ec8ade9fcf3bb550c59
| 35,708
|
py
|
Python
|
app/routes.py
|
damnMeddlingKid/Covid19CanadaAPI
|
3497c42567684e7089388eafb5d8a2c8683c3125
|
[
"MIT"
] | null | null | null |
app/routes.py
|
damnMeddlingKid/Covid19CanadaAPI
|
3497c42567684e7089388eafb5d8a2c8683c3125
|
[
"MIT"
] | null | null | null |
app/routes.py
|
damnMeddlingKid/Covid19CanadaAPI
|
3497c42567684e7089388eafb5d8a2c8683c3125
|
[
"MIT"
] | null | null | null |
from app import app
from flask import request
from flask_cors import CORS
import pandas as pd
from datetime import datetime
from functools import reduce
CORS(app) # enable CORS for all routes
# Lookup of province short code -> {'province': full name}, from the dataset's map CSV.
province = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/other/prov_map.csv")[['province_short', 'province']].set_index(['province_short']).to_dict('index')
# Health-region lookup keyed by HR_UID (as a string) -> {'province', 'health_region'}.
health_region = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/other/hr_map.csv")[['HR_UID', 'province', 'health_region']]
# HR_UID 9999 is the national aggregate row; relabel its province accordingly.
health_region.loc[health_region.HR_UID == 9999, "province"] = "All Provinces"
health_region = health_region.drop_duplicates().set_index(['HR_UID'])
# String keys so request parameters can be matched without int conversion.
health_region.index = health_region.index.map(str)
health_region = health_region.to_dict('index')
@app.route('/')
@app.route('/index')
def index():
    """Return the Canada-wide summary for the most recent data date.

    Merges the national cases/mortality/recovered/testing/active/vaccine
    time series on (date, province), filters to the date reported in
    update_time.txt, and returns {"version": ..., "summary": [...]}.
    """
    response = {}
    # update_time.txt holds "<YYYY-MM-DD HH:MM ...>". Fetch it once and reuse
    # it for both the cut-off date and the reported version string (the
    # original implementation downloaded the same file twice; it also kept
    # an unused `dfs` list, removed here).
    version_df = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/update_time.txt", sep="\t", header=None)
    version_str = version_df.head().values[0][0]
    datetime_str = version_str.split(' ')[0]
    date = datetime.strptime(datetime_str, '%Y-%m-%d')
    # Each national time series uses its own date column name; normalize all
    # of them to "date" so they can be merged.
    df_cases = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/timeseries_canada/cases_timeseries_canada.csv")
    df_cases.rename(columns={"date_report":"date"},inplace=True)
    df_mortality = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/timeseries_canada/mortality_timeseries_canada.csv")
    df_mortality.rename(columns={"date_death_report":"date"},inplace=True)
    df_recovered = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/timeseries_canada/recovered_timeseries_canada.csv")
    df_recovered.rename(columns={"date_recovered":"date"},inplace=True)
    df_testing = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/timeseries_canada/testing_timeseries_canada.csv")
    df_testing.rename(columns={"date_testing":"date"},inplace=True)
    df_active = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/timeseries_canada/active_timeseries_canada.csv")
    df_active.rename(columns={"date_active":"date"},inplace=True)
    df_active = df_active[['province', 'date', 'active_cases','active_cases_change']]
    df_avaccine = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/timeseries_canada/vaccine_administration_timeseries_canada.csv")
    df_avaccine.rename(columns={"date_vaccine_administered":"date"},inplace=True)
    df_dvaccine = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/timeseries_canada/vaccine_distribution_timeseries_canada.csv")
    df_dvaccine.rename(columns={"date_vaccine_distributed":"date"},inplace=True)
    # Outer-merge everything on (date, province) so a series missing a date
    # still yields a row; missing cells become "NULL" strings below.
    df_tomerge = [df_cases, df_mortality, df_recovered, df_testing, df_active, df_avaccine, df_dvaccine]
    df_final = reduce(lambda left, right: pd.merge(left, right, on=['date', 'province'], how='outer'), df_tomerge)
    df_final['date'] = pd.to_datetime(df_final['date'], dayfirst=True)
    df = df_final.fillna("NULL")
    df = df.loc[df.date == date]
    response["version"] = version_str
    df['date'] = df.date.dt.strftime('%d-%m-%Y')
    response["summary"] = df.to_dict(orient='records')
    return response
@app.route('/individual')
def individual():
    """Individual-level data endpoint (temporarily disabled).

    Always responds with HTTP 404 and a message pointing at the GitHub dataset.
    """
    body = "Individual level data return is temporarily disabled, please download from GitHub: https://github.com/ishaberry/Covid19Canada"
    return body, 404
#stat = request.args.get('stat')
#loc = request.args.get('loc')
#date = request.args.get('date')
#ymd = request.args.get('ymd')
#extra = request.args.get('extra')
#if date:
#try:
#date = datetime.strptime(date, '%d-%m-%Y')
#except:
#try:
#date = datetime.strptime(date, '%Y-%m-%d')
#except:
#date = None
#after = request.args.get('after')
#if after:
#try:
#after = datetime.strptime(after, '%d-%m-%Y')
#except:
#try:
#after = datetime.strptime(after, '%Y-%m-%d')
#except:
#after = None
#before = request.args.get('before')
#if before:
#try:
#before = datetime.strptime(before, '%d-%m-%Y')
#except:
#try:
#before = datetime.strptime(before, '%Y-%m-%d')
#except:
#before = None
#version = request.args.get('version')
#dfs = []
#response = {}
#if stat == 'cases':
#cases = pd.read_csv("https://raw.githubusercontent.com/ishaberry/Covid19Canada/master/cases.csv")
#cases['date_report'] = pd.to_datetime(cases['date_report'],dayfirst=True)
#cases['report_week'] = pd.to_datetime(cases['report_week'],dayfirst=True)
#if extra and extra=='false':
#pass
#else:
#case_source = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/other/cases_extra/cases_case_source.csv")[['case_source_short', 'case_source_full']]
#cases = cases.merge(case_source, left_on='case_source', right_on='case_source_short', how='left').drop(columns=['case_source', 'case_source_short']).rename(columns={'case_source_full': 'case_source'})
#dfs.append(cases)
#elif stat =='mortality':
#mortality = pd.read_csv("https://raw.githubusercontent.com/ishaberry/Covid19Canada/master/mortality.csv")
#mortality['date_death_report'] = pd.to_datetime(mortality['date_death_report'],dayfirst=True)
#if extra and extra=='false':
#pass
#else:
#death_source = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/other/mortality_extra/mortality_death_source.csv")[['death_source_short', 'death_source_full']]
#mortality = mortality.merge(death_source, left_on='death_source', right_on='death_source_short', how='left').drop(columns=['death_source', 'death_source_short']).rename(columns={'death_source_full': 'death_source'})
#dfs.append(mortality)
#else:
#cases = pd.read_csv("https://raw.githubusercontent.com/ishaberry/Covid19Canada/master/cases.csv")
#mortality = pd.read_csv("https://raw.githubusercontent.com/ishaberry/Covid19Canada/master/mortality.csv")
#cases['date_report'] = pd.to_datetime(cases['date_report'],dayfirst=True)
#mortality['date_death_report'] = pd.to_datetime(mortality['date_death_report'],dayfirst=True)
#if extra and extra=='false':
#pass
#else:
#case_source = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/other/cases_extra/cases_case_source.csv")[['case_source_short', 'case_source_full']]
#cases = cases.merge(case_source, left_on='case_source', right_on='case_source_short', how='left').drop(columns=['case_source', 'case_source_short']).rename(columns={'case_source_full': 'case_source'})
#death_source = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/other/mortality_extra/mortality_death_source.csv")[['death_source_short', 'death_source_full']]
#mortality = mortality.merge(death_source, left_on='death_source', right_on='death_source_short', how='left').drop(columns=['death_source', 'death_source_short']).rename(columns={'death_source_full': 'death_source'})
#dfs.append(cases)
#dfs.append(mortality)
#for df in dfs:
#df = df.fillna("NULL")
#if loc:
#if loc in province.keys():
#df = df.loc[df.province == province[loc]['province']]
#elif loc in health_region.keys():
#df = df.loc[df.health_region == health_region[loc]['health_region']]
#if loc != '9999':
#df = df.loc[df.province == health_region[loc]['province']]
#if date:
#if 'date_report' in df.columns:
#df = df.loc[df.date_report == date]
#if 'date_death_report' in df.columns:
#df = df.loc[df.date_death_report == date]
#if after:
#if 'date_report' in df.columns:
#df = df.loc[df.date_report >= after]
#if 'date_death_report' in df.columns:
#df = df.loc[df.date_death_report >= after]
#if before:
#if 'date_report' in df.columns:
#df = df.loc[df.date_report <= before]
#if 'date_death_report' in df.columns:
#df = df.loc[df.date_death_report <= before]
#if version:
#if version=='true':
#version = pd.read_csv("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/update_time.txt", sep="\t", header=None)
#response["version"] = version.head().values[0][0]
#if 'date_report' in df.columns:
#if ymd and ymd=='true':
#df['date_report'] = df.date_report.dt.strftime('%Y-%m-%d')
#df['report_week'] = df.report_week.dt.strftime('%Y-%m-%d')
#else:
#df['date_report'] = df.date_report.dt.strftime('%d-%m-%Y')
#df['report_week'] = df.report_week.dt.strftime('%d-%m-%Y')
#if 'date_death_report' in df.columns:
#if ymd and ymd=='true':
#df['date_death_report'] = df.date_death_report.dt.strftime('%Y-%m-%d')
#else:
#df['date_death_report'] = df.date_death_report.dt.strftime('%d-%m-%Y')
#if 'date_report' in df.columns:
#response["cases"] = df.to_dict(orient='records')
#if 'date_death_report' in df.columns:
#response["mortality"] = df.to_dict(orient='records')
#return response
# Base URL for the CCODWG Covid19Canada dataset mirror on jsDelivr.
_CCODWG_BASE = "https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada"

# stat keyword -> (csv file stem, name of that file's date column,
#                  whether an hr-level file exists for the stat,
#                  whether an unresolvable loc yields a 404 when the stat
#                  is requested explicitly -- matches the original route,
#                  where cases/mortality/recovered 404 but the rest fall
#                  through to an empty response).
_TIMESERIES_STATS = {
    'cases': ('cases', 'date_report', True, True),
    'mortality': ('mortality', 'date_death_report', True, True),
    'recovered': ('recovered', 'date_recovered', False, True),
    'testing': ('testing', 'date_testing', False, False),
    'active': ('active', 'date_active', False, False),
    'avaccine': ('vaccine_administration', 'date_vaccine_administered', False, False),
    'dvaccine': ('vaccine_distribution', 'date_vaccine_distributed', False, False),
}


def _parse_query_date(value):
    """Parse a date query parameter as DD-MM-YYYY or, failing that, YYYY-MM-DD.

    Returns a datetime on success, or None when ``value`` is missing or
    unparseable (bad dates are silently ignored, as in the original code,
    but only ValueError is swallowed now instead of every exception).
    """
    if not value:
        return None
    for fmt in ('%d-%m-%Y', '%Y-%m-%d'):
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            continue
    return None


def _resolve_level(loc, has_hr):
    """Map a ``loc`` query value to a dataset level, or None if unresolvable.

    Returns 'canada', 'prov' (for the literal 'prov' or a province code), or
    'hr' (for the literal 'hr' or a health-region code, only when the stat
    publishes health-region data).
    """
    if loc == 'canada':
        return 'canada'
    if loc == 'prov' or loc in province.keys():
        return 'prov'
    if has_hr and (loc == 'hr' or loc in health_region.keys()):
        return 'hr'
    return None


def _load_timeseries_csv(stem, date_col, level):
    """Fetch one CCODWG timeseries CSV at the given level and parse its date column."""
    url = "{0}/timeseries_{1}/{2}_timeseries_{1}.csv".format(_CCODWG_BASE, level, stem)
    df = pd.read_csv(url)
    df[date_col] = pd.to_datetime(df[date_col], dayfirst=True)
    return df


@app.route('/timeseries')
def timeseries():
    """Serve CCODWG COVID-19 time series as JSON.

    Query parameters:
        stat:    one of cases/mortality/recovered/testing/active/avaccine/
                 dvaccine; anything else (or omitted) returns every series
                 available for the requested location.
        loc:     'canada', 'prov' (default), 'hr', a province code, or a
                 health-region code.
        date:    exact report date (DD-MM-YYYY or YYYY-MM-DD).
        after:   keep rows on or after this date.
        before:  keep rows on or before this date.
        ymd:     'true' to format dates as YYYY-MM-DD instead of DD-MM-YYYY.
        version: 'true' to include the dataset update timestamp.

    Returns a JSON object keyed by stat name, or ("Record not found", 404)
    when the requested stat/location combination does not exist.
    """
    stat = request.args.get('stat')
    loc = request.args.get('loc')
    ymd = request.args.get('ymd')
    version = request.args.get('version')
    date = _parse_query_date(request.args.get('date'))
    after = _parse_query_date(request.args.get('after'))
    before = _parse_query_date(request.args.get('before'))
    response = {}
    if not loc:
        loc = 'prov'

    if stat in _TIMESERIES_STATS:
        requested = [stat]
    else:
        # No (or unknown) stat: return every series available at this level.
        # An unresolvable loc must 404 up front (the original code reached the
        # same outcome through the mortality branch's else clause).
        if _resolve_level(loc, True) is None:
            return "Record not found", 404
        requested = list(_TIMESERIES_STATS)

    series = []  # (response key, date column, dataframe)
    for key in requested:
        stem, date_col, has_hr, strict = _TIMESERIES_STATS[key]
        level = _resolve_level(loc, has_hr)
        if level is None:
            if strict and stat == key:
                # e.g. stat=recovered&loc=hr: no hr-level file exists.
                return "Record not found", 404
            # Stat not published at this level; omit it silently (matches
            # the original behaviour for testing/active/vaccine stats).
            continue
        series.append((key, date_col, _load_timeseries_csv(stem, date_col, level)))

    # The original set the version only when at least one dataframe was
    # processed; preserve that, but read the file once instead of per-frame.
    if version == 'true' and series:
        update_time = pd.read_csv(_CCODWG_BASE + "/update_time.txt", sep="\t", header=None)
        response["version"] = update_time.head().values[0][0]

    date_fmt = '%Y-%m-%d' if ymd == 'true' else '%d-%m-%Y'
    for key, date_col, df in series:
        df = df.fillna("NULL")
        if loc in province.keys():
            df = df.loc[df.province == province[loc]['province']]
        elif loc in health_region.keys():
            df = df.loc[df.health_region == health_region[loc]['health_region']]
            if loc != '9999':
                # '9999' appears to be a special region code whose name spans
                # provinces, so the province filter is skipped -- TODO confirm.
                df = df.loc[df.province == health_region[loc]['province']]
        if date:
            df = df.loc[df[date_col] == date]
        if after:
            # Bug fix: the vaccine-date branches previously compared against
            # `date` instead of `after`.
            df = df.loc[df[date_col] >= after]
        if before:
            # Bug fix: the vaccine-date branches previously compared against
            # `date` instead of `before`.
            df = df.loc[df[date_col] <= before]
        df[date_col] = df[date_col].dt.strftime(date_fmt)
        response[key] = df.to_dict(orient='records')
    return response
def _parse_query_date(value):
    """Parse a date query parameter as DD-MM-YYYY or, failing that, YYYY-MM-DD.

    Returns a datetime on success, or None when ``value`` is missing or
    unparseable (bad dates are silently ignored, as in the original code,
    but only ValueError is swallowed now instead of every exception).
    """
    if not value:
        return None
    for fmt in ('%d-%m-%Y', '%Y-%m-%d'):
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            continue
    return None


def _load_summary_csv(stem, date_col, level):
    """Fetch one CCODWG timeseries CSV and rename its date column to 'date'.

    The shared 'date' column is what the summary merge joins on.
    """
    url = ("https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/"
           "timeseries_{0}/{1}_timeseries_{0}.csv".format(level, stem))
    df = pd.read_csv(url)
    df.rename(columns={date_col: "date"}, inplace=True)
    return df


@app.route('/summary')
def summary():
    """Serve a merged per-date summary of every available COVID-19 series.

    Query parameters mirror /timeseries: loc ('canada', 'prov' (default),
    'hr', a province code, or a health-region code), date/after/before
    (DD-MM-YYYY or YYYY-MM-DD), ymd ('true' for YYYY-MM-DD output), and
    version ('true' to include the dataset update timestamp).

    When no date window is given, the summary defaults to the date of the
    latest dataset update. Returns {"summary": [...]} or
    ("Record not found", 404) for an unknown location.
    """
    loc = request.args.get('loc')
    ymd = request.args.get('ymd')
    # Renamed from `version` to avoid shadowing by the DataFrame read below.
    version_flag = request.args.get('version')
    date = _parse_query_date(request.args.get('date'))
    after = _parse_query_date(request.args.get('after'))
    before = _parse_query_date(request.args.get('before'))
    response = {}
    if not loc:
        loc = 'prov'
    if not date and not after and not before:
        # No explicit window: default to the date of the latest update.
        update_time = pd.read_csv(
            "https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/update_time.txt",
            sep="\t", header=None)
        datetime_str = update_time.head().values[0][0].split(' ')[0]
        date = datetime.strptime(datetime_str, '%Y-%m-%d')

    # Resolve the dataset level once; the original repeated this chain for
    # every file and returned 404 from the cases/mortality branches.
    if loc == 'canada':
        level = 'canada'
    elif loc == 'prov' or loc in province.keys():
        level = 'prov'
    elif loc == 'hr' or loc in health_region.keys():
        level = 'hr'
    else:
        return "Record not found", 404

    df_cases = _load_summary_csv('cases', 'date_report', level)
    df_mortality = _load_summary_csv('mortality', 'date_death_report', level)
    if level == 'hr':
        # Only cases and mortality are published at the health-region level.
        df_final = pd.merge(df_cases, df_mortality,
                            on=['date', 'province', 'health_region'], how='outer')
    else:
        df_recovered = _load_summary_csv('recovered', 'date_recovered', level)
        df_testing = _load_summary_csv('testing', 'date_testing', level)
        df_active = _load_summary_csv('active', 'date_active', level)
        # Keep only the active-case columns used by the summary output.
        df_active = df_active[['province', 'date', 'active_cases', 'active_cases_change']]
        df_avaccine = _load_summary_csv('vaccine_administration',
                                        'date_vaccine_administered', level)
        df_dvaccine = _load_summary_csv('vaccine_distribution',
                                        'date_vaccine_distributed', level)
        df_tomerge = [df_cases, df_mortality, df_recovered, df_testing,
                      df_active, df_avaccine, df_dvaccine]
        df_final = reduce(
            lambda left, right: pd.merge(left, right, on=['date', 'province'], how='outer'),
            df_tomerge)

    df_final['date'] = pd.to_datetime(df_final['date'], dayfirst=True)
    df = df_final.fillna("NULL")
    if loc in province.keys():
        df = df.loc[df.province == province[loc]['province']]
    elif loc in health_region.keys():
        df = df.loc[df.health_region == health_region[loc]['health_region']]
        if loc != '9999':
            # '9999' appears to be a special region code whose name spans
            # provinces, so the province filter is skipped -- TODO confirm.
            df = df.loc[df.province == health_region[loc]['province']]
    if date:
        df = df.loc[df.date == date]
    if after:
        df = df.loc[df.date >= after]
    if before:
        df = df.loc[df.date <= before]
    if version_flag == 'true':
        update_time = pd.read_csv(
            "https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/update_time.txt",
            sep="\t", header=None)
        response["version"] = update_time.head().values[0][0]
    df['date'] = df.date.dt.strftime('%Y-%m-%d' if ymd == 'true' else '%d-%m-%Y')
    response["summary"] = df.to_dict(orient='records')
    return response
@app.route('/version')
def version():
    """Report the timestamp of the most recent dataset update as JSON."""
    update_time = pd.read_csv(
        "https://cdn.jsdelivr.net/gh/ishaberry/Covid19Canada/update_time.txt",
        sep="\t",
        header=None,
    )
    # The file holds a single timestamp; take the first cell.
    return {"version": update_time.head().values[0][0]}
| 54.10303
| 228
| 0.639577
| 4,530
| 35,708
| 4.825828
| 0.03223
| 0.073464
| 0.029642
| 0.04611
| 0.940671
| 0.937286
| 0.931934
| 0.918439
| 0.89909
| 0.890673
| 0
| 0.00725
| 0.219727
| 35,708
| 659
| 229
| 54.185129
| 0.777367
| 0.137224
| 0
| 0.774327
| 0
| 0.00207
| 0.320844
| 0.028735
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010352
| false
| 0
| 0.012422
| 0.00207
| 0.047619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d2f86eb0a1b30eadc192290373baf574ec30125a
| 128
|
py
|
Python
|
tests/services/blockchain_follower/handlers/test_resteem.py
|
dpays/dsocial-notifications
|
32b1cdcd58d622407fd50206053c5b9735a56ba9
|
[
"MIT"
] | null | null | null |
tests/services/blockchain_follower/handlers/test_resteem.py
|
dpays/dsocial-notifications
|
32b1cdcd58d622407fd50206053c5b9735a56ba9
|
[
"MIT"
] | null | null | null |
tests/services/blockchain_follower/handlers/test_resteem.py
|
dpays/dsocial-notifications
|
32b1cdcd58d622407fd50206053c5b9735a56ba9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from yo.services.blockchain_follower.handlers import handle_repost
def test_handle_repost():
    """Placeholder smoke test for the repost handler; no assertions yet.

    TODO(review): exercise handle_repost with a representative blockchain
    operation and assert on the produced notification.
    """
    pass
| 18.285714
| 66
| 0.742188
| 17
| 128
| 5.352941
| 0.882353
| 0.263736
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009091
| 0.140625
| 128
| 6
| 67
| 21.333333
| 0.818182
| 0.164063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.