hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
79d4bd1374601b3b67fc39c4b2924659ec211e61
| 43,928
|
py
|
Python
|
libs/python/qumranica/api/text_api.py
|
Scripta-Qumranica-Electronica/SQE_API_Connectors
|
aaa9b9eb8709d4257c32ea57321a179c6b1e041a
|
[
"MIT"
] | null | null | null |
libs/python/qumranica/api/text_api.py
|
Scripta-Qumranica-Electronica/SQE_API_Connectors
|
aaa9b9eb8709d4257c32ea57321a179c6b1e041a
|
[
"MIT"
] | null | null | null |
libs/python/qumranica/api/text_api.py
|
Scripta-Qumranica-Electronica/SQE_API_Connectors
|
aaa9b9eb8709d4257c32ea57321a179c6b1e041a
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
SQE API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from qumranica.api_client import ApiClient
from qumranica.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class TextApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def v1_editions_edition_id_lines_line_id_get(self, edition_id, line_id, **kwargs): # noqa: E501
"""Retrieves all signs and their data from the given line # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_lines_line_id_get(edition_id, line_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param int line_id: Id of the line (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: LineTextDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.v1_editions_edition_id_lines_line_id_get_with_http_info(edition_id, line_id, **kwargs) # noqa: E501
def v1_editions_edition_id_lines_line_id_get_with_http_info(self, edition_id, line_id, **kwargs): # noqa: E501
"""Retrieves all signs and their data from the given line # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_lines_line_id_get_with_http_info(edition_id, line_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param int line_id: Id of the line (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(LineTextDTO, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'edition_id',
'line_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_editions_edition_id_lines_line_id_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'edition_id' is set
if self.api_client.client_side_validation and ('edition_id' not in local_var_params or # noqa: E501
local_var_params['edition_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `edition_id` when calling `v1_editions_edition_id_lines_line_id_get`") # noqa: E501
# verify the required parameter 'line_id' is set
if self.api_client.client_side_validation and ('line_id' not in local_var_params or # noqa: E501
local_var_params['line_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `line_id` when calling `v1_editions_edition_id_lines_line_id_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'edition_id' in local_var_params:
path_params['editionId'] = local_var_params['edition_id'] # noqa: E501
if 'line_id' in local_var_params:
path_params['lineId'] = local_var_params['line_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/editions/{editionId}/lines/{lineId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='LineTextDTO', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_editions_edition_id_text_fragments_get(self, edition_id, **kwargs): # noqa: E501
"""Retrieves the ids of all Fragments of all fragments in the given edition of a scroll # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_text_fragments_get(edition_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: TextFragmentDataListDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.v1_editions_edition_id_text_fragments_get_with_http_info(edition_id, **kwargs) # noqa: E501
def v1_editions_edition_id_text_fragments_get_with_http_info(self, edition_id, **kwargs): # noqa: E501
"""Retrieves the ids of all Fragments of all fragments in the given edition of a scroll # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_text_fragments_get_with_http_info(edition_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(TextFragmentDataListDTO, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'edition_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_editions_edition_id_text_fragments_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'edition_id' is set
if self.api_client.client_side_validation and ('edition_id' not in local_var_params or # noqa: E501
local_var_params['edition_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `edition_id` when calling `v1_editions_edition_id_text_fragments_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'edition_id' in local_var_params:
path_params['editionId'] = local_var_params['edition_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/editions/{editionId}/text-fragments', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TextFragmentDataListDTO', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_editions_edition_id_text_fragments_post(self, edition_id, **kwargs): # noqa: E501
"""Creates a new text fragment in the given edition of a scroll # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_text_fragments_post(edition_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param CreateTextFragmentDTO create_text_fragment_dto: A JSON object with the details of the new text fragment to be created
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: TextFragmentDataDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.v1_editions_edition_id_text_fragments_post_with_http_info(edition_id, **kwargs) # noqa: E501
def v1_editions_edition_id_text_fragments_post_with_http_info(self, edition_id, **kwargs): # noqa: E501
"""Creates a new text fragment in the given edition of a scroll # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_text_fragments_post_with_http_info(edition_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param CreateTextFragmentDTO create_text_fragment_dto: A JSON object with the details of the new text fragment to be created
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(TextFragmentDataDTO, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'edition_id',
'create_text_fragment_dto'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_editions_edition_id_text_fragments_post" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'edition_id' is set
if self.api_client.client_side_validation and ('edition_id' not in local_var_params or # noqa: E501
local_var_params['edition_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `edition_id` when calling `v1_editions_edition_id_text_fragments_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'edition_id' in local_var_params:
path_params['editionId'] = local_var_params['edition_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_text_fragment_dto' in local_var_params:
body_params = local_var_params['create_text_fragment_dto']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/editions/{editionId}/text-fragments', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TextFragmentDataDTO', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_editions_edition_id_text_fragments_text_fragment_id_artefacts_get(self, edition_id, text_fragment_id, **kwargs): # noqa: E501
"""Retrieves the ids of all Artefacts in the given textFragmentName # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_text_fragments_text_fragment_id_artefacts_get(edition_id, text_fragment_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param int text_fragment_id: Id of the text fragment (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ArtefactDataListDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.v1_editions_edition_id_text_fragments_text_fragment_id_artefacts_get_with_http_info(edition_id, text_fragment_id, **kwargs) # noqa: E501
def v1_editions_edition_id_text_fragments_text_fragment_id_artefacts_get_with_http_info(self, edition_id, text_fragment_id, **kwargs): # noqa: E501
"""Retrieves the ids of all Artefacts in the given textFragmentName # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_text_fragments_text_fragment_id_artefacts_get_with_http_info(edition_id, text_fragment_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param int text_fragment_id: Id of the text fragment (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ArtefactDataListDTO, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'edition_id',
'text_fragment_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_editions_edition_id_text_fragments_text_fragment_id_artefacts_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'edition_id' is set
if self.api_client.client_side_validation and ('edition_id' not in local_var_params or # noqa: E501
local_var_params['edition_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `edition_id` when calling `v1_editions_edition_id_text_fragments_text_fragment_id_artefacts_get`") # noqa: E501
# verify the required parameter 'text_fragment_id' is set
if self.api_client.client_side_validation and ('text_fragment_id' not in local_var_params or # noqa: E501
local_var_params['text_fragment_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `text_fragment_id` when calling `v1_editions_edition_id_text_fragments_text_fragment_id_artefacts_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'edition_id' in local_var_params:
path_params['editionId'] = local_var_params['edition_id'] # noqa: E501
if 'text_fragment_id' in local_var_params:
path_params['textFragmentId'] = local_var_params['text_fragment_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/editions/{editionId}/text-fragments/{textFragmentId}/artefacts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArtefactDataListDTO', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_editions_edition_id_text_fragments_text_fragment_id_get(self, edition_id, text_fragment_id, **kwargs): # noqa: E501
"""Retrieves all signs and their data from the given textFragmentName # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_text_fragments_text_fragment_id_get(edition_id, text_fragment_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param int text_fragment_id: Id of the text fragment (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: TextEditionDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.v1_editions_edition_id_text_fragments_text_fragment_id_get_with_http_info(edition_id, text_fragment_id, **kwargs) # noqa: E501
def v1_editions_edition_id_text_fragments_text_fragment_id_get_with_http_info(self, edition_id, text_fragment_id, **kwargs): # noqa: E501
"""Retrieves all signs and their data from the given textFragmentName # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_text_fragments_text_fragment_id_get_with_http_info(edition_id, text_fragment_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param int text_fragment_id: Id of the text fragment (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(TextEditionDTO, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'edition_id',
'text_fragment_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_editions_edition_id_text_fragments_text_fragment_id_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'edition_id' is set
if self.api_client.client_side_validation and ('edition_id' not in local_var_params or # noqa: E501
local_var_params['edition_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `edition_id` when calling `v1_editions_edition_id_text_fragments_text_fragment_id_get`") # noqa: E501
# verify the required parameter 'text_fragment_id' is set
if self.api_client.client_side_validation and ('text_fragment_id' not in local_var_params or # noqa: E501
local_var_params['text_fragment_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `text_fragment_id` when calling `v1_editions_edition_id_text_fragments_text_fragment_id_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'edition_id' in local_var_params:
path_params['editionId'] = local_var_params['edition_id'] # noqa: E501
if 'text_fragment_id' in local_var_params:
path_params['textFragmentId'] = local_var_params['text_fragment_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/editions/{editionId}/text-fragments/{textFragmentId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TextEditionDTO', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_editions_edition_id_text_fragments_text_fragment_id_lines_get(self, edition_id, text_fragment_id, **kwargs): # noqa: E501
"""Retrieves the ids of all lines in the given textFragmentName # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_editions_edition_id_text_fragments_text_fragment_id_lines_get(edition_id, text_fragment_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int edition_id: Id of the edition (required)
:param int text_fragment_id: Id of the text fragment (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: LineDataListDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.v1_editions_edition_id_text_fragments_text_fragment_id_lines_get_with_http_info(edition_id, text_fragment_id, **kwargs) # noqa: E501
def v1_editions_edition_id_text_fragments_text_fragment_id_lines_get_with_http_info(self, edition_id, text_fragment_id, **kwargs):  # noqa: E501
    """Retrieves the ids of all lines in the given textFragmentName  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.v1_editions_edition_id_text_fragments_text_fragment_id_lines_get_with_http_info(edition_id, text_fragment_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int edition_id: Id of the edition (required)
    :param int text_fragment_id: Id of the text fragment (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(LineDataListDTO, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the explicit arguments (self, edition_id, text_fragment_id,
    # kwargs) so that named and keyword parameters can be looked up
    # uniformly by string key below.
    local_var_params = locals()
    # Endpoint-specific parameters accepted by this method.
    all_params = [
        'edition_id',
        'text_fragment_id'
    ]
    # Per-request options common to every generated API method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout'
        ]
    )
    # Reject unknown keyword arguments; fold accepted ones into
    # local_var_params so the lookups below see a flat mapping.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method v1_editions_edition_id_text_fragments_text_fragment_id_lines_get" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'edition_id' is set
    if self.api_client.client_side_validation and ('edition_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['edition_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `edition_id` when calling `v1_editions_edition_id_text_fragments_text_fragment_id_lines_get`")  # noqa: E501
    # verify the required parameter 'text_fragment_id' is set
    if self.api_client.client_side_validation and ('text_fragment_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['text_fragment_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `text_fragment_id` when calling `v1_editions_edition_id_text_fragments_text_fragment_id_lines_get`")  # noqa: E501
    collection_formats = {}
    # Both ids are substituted into the URL path template below.
    path_params = {}
    if 'edition_id' in local_var_params:
        path_params['editionId'] = local_var_params['edition_id']  # noqa: E501
    if 'text_fragment_id' in local_var_params:
        path_params['textFragmentId'] = local_var_params['text_fragment_id']  # noqa: E501
    # This GET endpoint takes no query string, extra headers, form fields,
    # file uploads or request body.
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/plain', 'application/json', 'text/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    # Delegate transport, (de)serialization and async handling to the
    # shared ApiClient.
    return self.api_client.call_api(
        '/v1/editions/{editionId}/text-fragments/{textFragmentId}/lines', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='LineDataListDTO',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def v1_editions_edition_id_text_fragments_text_fragment_id_put(self, edition_id, text_fragment_id, **kwargs):  # noqa: E501
    """Updates the specified text fragment with the submitted properties  # noqa: E501

    Convenience wrapper: delegates to the ``_with_http_info`` variant and
    returns only the deserialized response data (status code and headers
    are dropped).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.v1_editions_edition_id_text_fragments_text_fragment_id_put(edition_id, text_fragment_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int edition_id: Edition of the text fragment being updates (required)
    :param int text_fragment_id: Id of the text fragment being updates (required)
    :param UpdateTextFragmentDTO update_text_fragment_dto: Details of the updated text fragment
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: TextFragmentDataDTO
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the lower-level call to hand back only the response body.
    kwargs['_return_http_data_only'] = True
    return self.v1_editions_edition_id_text_fragments_text_fragment_id_put_with_http_info(
        edition_id, text_fragment_id, **kwargs)  # noqa: E501
def v1_editions_edition_id_text_fragments_text_fragment_id_put_with_http_info(self, edition_id, text_fragment_id, **kwargs):  # noqa: E501
    """Updates the specified text fragment with the submitted properties  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.v1_editions_edition_id_text_fragments_text_fragment_id_put_with_http_info(edition_id, text_fragment_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int edition_id: Edition of the text fragment being updates (required)
    :param int text_fragment_id: Id of the text fragment being updates (required)
    :param UpdateTextFragmentDTO update_text_fragment_dto: Details of the updated text fragment
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(TextFragmentDataDTO, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the explicit arguments (self, edition_id, text_fragment_id,
    # kwargs) so that named and keyword parameters can be looked up
    # uniformly by string key below.
    local_var_params = locals()
    # Endpoint-specific parameters accepted by this method; the DTO body
    # is optional and arrives via kwargs.
    all_params = [
        'edition_id',
        'text_fragment_id',
        'update_text_fragment_dto'
    ]
    # Per-request options common to every generated API method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout'
        ]
    )
    # Reject unknown keyword arguments; fold accepted ones into
    # local_var_params so the lookups below see a flat mapping.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method v1_editions_edition_id_text_fragments_text_fragment_id_put" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'edition_id' is set
    if self.api_client.client_side_validation and ('edition_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['edition_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `edition_id` when calling `v1_editions_edition_id_text_fragments_text_fragment_id_put`")  # noqa: E501
    # verify the required parameter 'text_fragment_id' is set
    if self.api_client.client_side_validation and ('text_fragment_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['text_fragment_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `text_fragment_id` when calling `v1_editions_edition_id_text_fragments_text_fragment_id_put`")  # noqa: E501
    collection_formats = {}
    # Both ids are substituted into the URL path template below.
    path_params = {}
    if 'edition_id' in local_var_params:
        path_params['editionId'] = local_var_params['edition_id']  # noqa: E501
    if 'text_fragment_id' in local_var_params:
        path_params['textFragmentId'] = local_var_params['text_fragment_id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional DTO, when supplied, becomes the JSON request body.
    body_params = None
    if 'update_text_fragment_dto' in local_var_params:
        body_params = local_var_params['update_text_fragment_dto']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/plain', 'application/json', 'text/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/json', 'application/*+json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    # Delegate transport, (de)serialization and async handling to the
    # shared ApiClient.
    return self.api_client.call_api(
        '/v1/editions/{editionId}/text-fragments/{textFragmentId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TextFragmentDataDTO',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
| 49.636158
| 182
| 0.622268
| 5,100
| 43,928
| 5.042353
| 0.041373
| 0.053196
| 0.058796
| 0.039897
| 0.964886
| 0.963719
| 0.963719
| 0.962319
| 0.960686
| 0.958625
| 0
| 0.014267
| 0.309097
| 43,928
| 884
| 183
| 49.692308
| 0.833048
| 0.431137
| 0
| 0.737819
| 1
| 0
| 0.227448
| 0.090254
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034803
| false
| 0
| 0.011601
| 0
| 0.081207
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8df3dd75e110b36aa383d4bad01814cabd8ee178
| 34
|
py
|
Python
|
dqc_parser/__init__.py
|
qis-unipr/dqc-circuit
|
16281861957a50510a3a66e959096e41de872e9c
|
[
"Apache-2.0"
] | null | null | null |
dqc_parser/__init__.py
|
qis-unipr/dqc-circuit
|
16281861957a50510a3a66e959096e41de872e9c
|
[
"Apache-2.0"
] | null | null | null |
dqc_parser/__init__.py
|
qis-unipr/dqc-circuit
|
16281861957a50510a3a66e959096e41de872e9c
|
[
"Apache-2.0"
] | null | null | null |
from .ast_to_dag import ast_to_dag
| 34
| 34
| 0.882353
| 8
| 34
| 3.25
| 0.625
| 0.384615
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
8df45b98b509be4d28210aa0ae247b4ba244817c
| 156
|
py
|
Python
|
com/suyuening/python/chapter2/Tuple.py
|
infoplat/BeginningPython
|
64d7ba3ff3f705a2b531eaa6eb4c83d5fcf4c36a
|
[
"Apache-2.0"
] | null | null | null |
com/suyuening/python/chapter2/Tuple.py
|
infoplat/BeginningPython
|
64d7ba3ff3f705a2b531eaa6eb4c83d5fcf4c36a
|
[
"Apache-2.0"
] | null | null | null |
com/suyuening/python/chapter2/Tuple.py
|
infoplat/BeginningPython
|
64d7ba3ff3f705a2b531eaa6eb4c83d5fcf4c36a
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# NOTE: Python 2 syntax -- ``print`` is a statement here, not a function.
'''
Created on 2015-11-02

@author: Yue Ning
'''
# Three comma-separated values: prints "1 2 3".
print 1, 2, 3
# The parentheses build a tuple, so this prints "(1, 2, 3)".
print (1, 2, 3)
# The trailing comma suppresses the newline after 42.
print 42,
# Plain arithmetic: 3 * 42 -> 126.
print 3 * (40 + 2)
# (40, +2) is a two-element tuple; repetition prints (40, 2, 40, 2, 40, 2).
print 3 * (40, +2)
# (40 + 2,) is a one-element tuple; prints (42, 42, 42).
print 3 * (40 + 2,)
| 12
| 21
| 0.557692
| 30
| 156
| 2.9
| 0.466667
| 0.206897
| 0.275862
| 0.310345
| 0.551724
| 0.310345
| 0.310345
| 0.310345
| 0
| 0
| 0
| 0.235294
| 0.237179
| 156
| 12
| 22
| 13
| 0.495798
| 0.076923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
5c2c616c946b8cf764d110b2438cfa1415693b0a
| 265
|
py
|
Python
|
live_cd_scripts/windows_utilman.py
|
ForbiddenApplePy/applepy
|
4eb0965f7f634b0f340beee54dce09c12e3e4f54
|
[
"WTFPL"
] | null | null | null |
live_cd_scripts/windows_utilman.py
|
ForbiddenApplePy/applepy
|
4eb0965f7f634b0f340beee54dce09c12e3e4f54
|
[
"WTFPL"
] | null | null | null |
live_cd_scripts/windows_utilman.py
|
ForbiddenApplePy/applepy
|
4eb0965f7f634b0f340beee54dce09c12e3e4f54
|
[
"WTFPL"
] | null | null | null |
import os
def utilman():
    """Swap cmd.exe in for Utilman.exe on the mounted Windows target.

    The original accessibility binary is preserved as ``Utilman.bak`` so
    the change can be reverted by hand.

    NOTE(review): replacing the accessibility tool with cmd.exe is a
    well-known login-screen shell technique -- confirm this live-CD
    script is only run on systems you are authorized to modify.
    """
    swap_commands = (
        # Back up the original accessibility executable first.
        'mv /mnt/targetDrive/Windows/System32/Utilman.exe /mnt/targetDrive/Windows/System32/Utilman.bak',
        # Then put cmd.exe in its place.
        'mv /mnt/targetDrive/Windows/System32/cmd.exe /mnt/targetDrive/Windows/System32/Utilman.exe',
    )
    for shell_command in swap_commands:
        os.system(shell_command)
| 29.444444
| 105
| 0.716981
| 34
| 265
| 5.588235
| 0.382353
| 0.294737
| 0.442105
| 0.610526
| 0.873684
| 0.873684
| 0.410526
| 0
| 0
| 0
| 0
| 0.035242
| 0.143396
| 265
| 8
| 106
| 33.125
| 0.801762
| 0
| 0
| 0.333333
| 0
| 0.333333
| 0.69434
| 0.664151
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3090400b9d6de48217c570a38a209fecb1b3db1b
| 1,697
|
py
|
Python
|
tests/test_fleets_views.py
|
danilomarto/vltour-api
|
e9881d9e7b9ea0e890bc4d1f7efd12d08542e343
|
[
"MIT"
] | null | null | null |
tests/test_fleets_views.py
|
danilomarto/vltour-api
|
e9881d9e7b9ea0e890bc4d1f7efd12d08542e343
|
[
"MIT"
] | null | null | null |
tests/test_fleets_views.py
|
danilomarto/vltour-api
|
e9881d9e7b9ea0e890bc4d1f7efd12d08542e343
|
[
"MIT"
] | null | null | null |
from django.test import SimpleTestCase
from django.urls import reverse
class TestListFleet(SimpleTestCase):
    """Exercise the ``fleets:list_fleet`` view."""

    def setUp(self):
        # Issue the GET once; every test inspects this same response.
        url = reverse("fleets:list_fleet")
        self.request = self.client.get(url)

    def test_status_code(self) -> None:
        """The listing view responds with HTTP 200."""
        self.assertEqual(self.request.status_code, 200)

    def test_template_used(self) -> None:
        """The listing view renders the shared car-list template."""
        self.assertTemplateUsed(self.request, "pages/list_all_cars.html")
class TestListOnlyCars(SimpleTestCase):
    """Exercise the ``fleets:list_only_cars`` view.

    NOTE(review): unlike the sibling list tests, this class asserts no
    template -- confirm whether that omission is intentional.
    """

    def setUp(self):
        # Issue the GET once; the test inspects this same response.
        url = reverse("fleets:list_only_cars")
        self.request = self.client.get(url)

    def test_status_code(self) -> None:
        """The cars-only view responds with HTTP 200."""
        self.assertEqual(self.request.status_code, 200)
class TestListOnlyBus(SimpleTestCase):
    """Exercise the ``fleets:list_only_bus`` view."""

    def setUp(self):
        # Issue the GET once; every test inspects this same response.
        url = reverse("fleets:list_only_bus")
        self.request = self.client.get(url)

    def test_status_code(self) -> None:
        """The bus-only view responds with HTTP 200."""
        self.assertEqual(self.request.status_code, 200)

    def test_template_used(self) -> None:
        """The bus-only view renders the shared car-list template."""
        self.assertTemplateUsed(self.request, "pages/list_all_cars.html")
class TestListOnlyMicros(SimpleTestCase):
    """Exercise the ``fleets:list_only_micros`` view."""

    def setUp(self):
        # Issue the GET once; every test inspects this same response.
        url = reverse("fleets:list_only_micros")
        self.request = self.client.get(url)

    def test_status_code(self) -> None:
        """The micros-only view responds with HTTP 200."""
        self.assertEqual(self.request.status_code, 200)

    def test_template_used(self) -> None:
        """The micros-only view renders the shared car-list template."""
        self.assertTemplateUsed(self.request, "pages/list_all_cars.html")
class TestListOnlyVan(SimpleTestCase):
    """Exercise the ``fleets:list_only_van`` view."""

    def setUp(self):
        # Issue the GET once; every test inspects this same response.
        url = reverse("fleets:list_only_van")
        self.request = self.client.get(url)

    def test_status_code(self) -> None:
        """The van-only view responds with HTTP 200."""
        self.assertEqual(self.request.status_code, 200)

    def test_template_used(self) -> None:
        """The van-only view renders the shared car-list template."""
        self.assertTemplateUsed(self.request, "pages/list_all_cars.html")
| 30.854545
| 74
| 0.715969
| 214
| 1,697
| 5.490654
| 0.17757
| 0.131064
| 0.091915
| 0.110638
| 0.85617
| 0.85617
| 0.85617
| 0.85617
| 0.85617
| 0.85617
| 0
| 0.010616
| 0.167354
| 1,697
| 54
| 75
| 31.425926
| 0.820948
| 0
| 0
| 0.657143
| 0
| 0
| 0.116087
| 0.082499
| 0
| 0
| 0
| 0
| 0.257143
| 1
| 0.4
| false
| 0
| 0.057143
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
30f9104c1e10aa93d505bc8a1d718b70a295974a
| 148
|
py
|
Python
|
gym/envs/UAV/__init__.py
|
zachary2wave/UAV-aid-communication
|
801fe22d839261af43127e31db00f166ed6484a0
|
[
"MIT"
] | 2
|
2019-12-02T15:36:55.000Z
|
2022-02-10T11:55:10.000Z
|
gym/envs/UAV/__init__.py
|
zachary2wave/UAV-aid-communication
|
801fe22d839261af43127e31db00f166ed6484a0
|
[
"MIT"
] | null | null | null |
gym/envs/UAV/__init__.py
|
zachary2wave/UAV-aid-communication
|
801fe22d839261af43127e31db00f166ed6484a0
|
[
"MIT"
] | 2
|
2019-12-10T00:10:38.000Z
|
2021-01-07T03:31:38.000Z
|
from gym.envs.UAV.downlink import Downlink
from gym.envs.UAV.downlink_2d import Downlink_2d
from gym.envs.UAV.downlink_2d_v2 import Downlink_2d_v2
| 29.6
| 54
| 0.851351
| 27
| 148
| 4.444444
| 0.296296
| 0.333333
| 0.275
| 0.35
| 0.583333
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0.044444
| 0.087838
| 148
| 4
| 55
| 37
| 0.844444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
eb71d40fc3ce941a8ded242e488e0e749e2dde23
| 2,905
|
py
|
Python
|
src/bpp/migrations/0030_auto_20150923_1351.py
|
iplweb/django-bpp
|
85f183a99d8d5027ae4772efac1e4a9f21675849
|
[
"BSD-3-Clause"
] | 1
|
2017-04-27T19:50:02.000Z
|
2017-04-27T19:50:02.000Z
|
src/bpp/migrations/0030_auto_20150923_1351.py
|
mpasternak/django-bpp
|
434338821d5ad1aaee598f6327151aba0af66f5e
|
[
"BSD-3-Clause"
] | 41
|
2019-11-07T00:07:02.000Z
|
2022-02-27T22:09:39.000Z
|
src/bpp/migrations/0030_auto_20150923_1351.py
|
iplweb/bpp
|
f027415cc3faf1ca79082bf7bacd4be35b1a6fdf
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add an open-access mode field to ``zrodlo`` and re-point the
    open-access columns on ``wydawnictwo_ciagle`` / ``wydawnictwo_zwarte``
    at their lookup models via foreign keys."""

    dependencies = [
        ('bpp', '0029_auto_20150923_1343'),
    ]

    operations = [
        # New indexed choice column on 'zrodlo' with FULL/PARTIAL choices
        # (verbose names are Polish byte strings -- left untouched).
        migrations.AddField(
            model_name='zrodlo',
            name='openaccess_tryb_dostepu',
            field=models.CharField(blank=True, max_length=50, verbose_name=b'OpenAccess: tryb dost\xc4\x99pu', db_index=True, choices=[(b'FULL', b'pe\xc5\x82ny'), (b'PARTIAL', b'cz\xc4\x99\xc5\x9bciowy')]),
            preserve_default=True,
        ),
        # The following AlterField operations all turn open-access columns
        # into nullable CASCADE foreign keys to dedicated lookup models.
        migrations.AlterField(
            model_name='wydawnictwo_ciagle',
            name='openaccess_czas_publikacji',
            field=models.ForeignKey(on_delete=models.CASCADE, verbose_name=b'OpenAccess: czas udost\xc4\x99pnienia', blank=True, to='bpp.Czas_Udostepnienia_OpenAccess', null=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='wydawnictwo_ciagle',
            name='openaccess_licencja',
            field=models.ForeignKey(on_delete=models.CASCADE, verbose_name=b'OpenAccess: licencja', blank=True, to='bpp.Licencja_OpenAccess', null=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='wydawnictwo_ciagle',
            name='openaccess_tryb_dostepu',
            field=models.ForeignKey(on_delete=models.CASCADE, verbose_name=b'OpenAccess: tryb dost\xc4\x99pu', blank=True, to='bpp.Tryb_OpenAccess_Wydawnictwo_Ciagle', null=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='wydawnictwo_ciagle',
            name='openaccess_wersja_tekstu',
            field=models.ForeignKey(on_delete=models.CASCADE, verbose_name=b'OpenAccess: wersja tekstu', blank=True, to='bpp.Wersja_Tekstu_OpenAccess', null=True),
            preserve_default=True,
        ),
        # Same set of changes for the 'wydawnictwo_zwarte' model (it has
        # no 'tryb_dostepu' FK here, unlike 'wydawnictwo_ciagle').
        migrations.AlterField(
            model_name='wydawnictwo_zwarte',
            name='openaccess_czas_publikacji',
            field=models.ForeignKey(on_delete=models.CASCADE, verbose_name=b'OpenAccess: czas udost\xc4\x99pnienia', blank=True, to='bpp.Czas_Udostepnienia_OpenAccess', null=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='wydawnictwo_zwarte',
            name='openaccess_licencja',
            field=models.ForeignKey(on_delete=models.CASCADE, verbose_name=b'OpenAccess: licencja', blank=True, to='bpp.Licencja_OpenAccess', null=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='wydawnictwo_zwarte',
            name='openaccess_wersja_tekstu',
            field=models.ForeignKey(on_delete=models.CASCADE, verbose_name=b'OpenAccess: wersja tekstu', blank=True, to='bpp.Wersja_Tekstu_OpenAccess', null=True),
            preserve_default=True,
        ),
    ]
| 46.111111
| 206
| 0.656799
| 312
| 2,905
| 5.891026
| 0.214744
| 0.039173
| 0.052231
| 0.095756
| 0.841676
| 0.841676
| 0.816104
| 0.816104
| 0.786725
| 0.786725
| 0
| 0.01738
| 0.227539
| 2,905
| 62
| 207
| 46.854839
| 0.801693
| 0.007229
| 0
| 0.8
| 0
| 0
| 0.284525
| 0.138099
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018182
| 0
| 0.072727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eb7274d3f1c880c72d7fac2d3ed3b3694d90383b
| 2,286
|
py
|
Python
|
client.py
|
gitcordier/stack_qalculator
|
08b6615a32fff6f109b190d40e3688f4eebf6366
|
[
"MIT"
] | null | null | null |
client.py
|
gitcordier/stack_qalculator
|
08b6615a32fff6f109b190d40e3688f4eebf6366
|
[
"MIT"
] | null | null | null |
client.py
|
gitcordier/stack_qalculator
|
08b6615a32fff6f109b190d40e3688f4eebf6366
|
[
"MIT"
] | null | null | null |
import requests
# Smoke-test client for the RPN calculator HTTP API served on
# localhost:5000. Each section below exercises one endpoint and prints
# the raw response body.

# List every supported operator: GET /rpn/op
get = requests.get('http://localhost:5000/rpn/op')
print('Get all operators:\n ', get.text)
# Create four stacks: POST /rpn/stack with no payload
print('Create stacks')
post = requests.post('http://localhost:5000/rpn/stack')
print(post.text)
post = requests.post('http://localhost:5000/rpn/stack')
print(post.text)
post = requests.post('http://localhost:5000/rpn/stack')
print(post.text)
post = requests.post('http://localhost:5000/rpn/stack')
print(post.text)
# List every stack: GET /rpn/stack
get = requests.get('http://localhost:5000/rpn/stack')
print('Get all stacks:\n', get.text)
# Push 10, 5, 6 onto stack 2: POST /rpn/stack with form data
post = requests.post('http://localhost:5000/rpn/stack', data = {'stack_id': 2, 'value': 10})
print(post.text)
post = requests.post('http://localhost:5000/rpn/stack', data = {'stack_id': 2, 'value': 5})
print(post.text)
post = requests.post('http://localhost:5000/rpn/stack', data = {'stack_id': 2, 'value': 6})
print(post.text)
# List every stack again to see the pushed values
get = requests.get('http://localhost:5000/rpn/stack')
print('Get all stacks:\n', get.text)
# Apply the operator sequence '+-' to stack 2: GET /rpn/stack/<stack_id>
get = requests.get('http://localhost:5000/rpn/stack/2', params = {'op': '+-'})
print('Computation(s) within a stack:\n', get.text)
# List every stack to observe the computation's effect
get = requests.get('http://localhost:5000/rpn/stack')
print('Get all stacks:\n', get.text)
# Fetch only stack 2: GET /rpn/stack?stack_id=2
get = requests.get('http://localhost:5000/rpn/stack', params = {'stack_id': 2})
print('Get a specific stack:\n', get.text)
# Empty stack 2: PUT /rpn/stack/<stack_id>
put = requests.put('http://localhost:5000/rpn/stack/2')
print('Empty a given stack:\n', put.text)
# List every stack to confirm stack 2 is now empty
get = requests.get('http://localhost:5000/rpn/stack')
print('Get all stacks:\n', get.text)
# Delete stack 2: DELETE /rpn/stack?stack_id=2
delete = requests.delete('http://localhost:5000/rpn/stack', params = {'stack_id': 2})
print('Delete the stack:\n', delete.text)
# List every stack to confirm the deletion
get = requests.get('http://localhost:5000/rpn/stack')
print('Get all stacks:\n', get.text)
#
#END
| 29.307692
| 92
| 0.680227
| 354
| 2,286
| 4.372881
| 0.112994
| 0.139535
| 0.186693
| 0.219638
| 0.761628
| 0.738372
| 0.72093
| 0.72093
| 0.72093
| 0.693152
| 0
| 0.039803
| 0.109799
| 2,286
| 77
| 93
| 29.688312
| 0.720885
| 0.232283
| 0
| 0.583333
| 0
| 0
| 0.464658
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027778
| 0
| 0.027778
| 0.5
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
69076d1a106b316982959d69e957201c3eb9212b
| 14,898
|
py
|
Python
|
sana_pchr/migrations/0001_initial.py
|
SanaMobile/sana.pchr.oss-web
|
2b2fd75a1730f1743e28b4499bb1ba76fa100970
|
[
"BSD-3-Clause"
] | null | null | null |
sana_pchr/migrations/0001_initial.py
|
SanaMobile/sana.pchr.oss-web
|
2b2fd75a1730f1743e28b4499bb1ba76fa100970
|
[
"BSD-3-Clause"
] | null | null | null |
sana_pchr/migrations/0001_initial.py
|
SanaMobile/sana.pchr.oss-web
|
2b2fd75a1730f1743e28b4499bb1ba76fa100970
|
[
"BSD-3-Clause"
] | 2
|
2018-06-07T21:54:08.000Z
|
2018-07-11T20:40:19.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from sana_pchr.models.fields import *
from sana_pchr.crypto import Credential
class Migration(migrations.Migration):
    """Initial schema for the sana_pchr app.

    Creates every core table (Clinic, Device, Patient, Physician, Encounter,
    Visit, Record, Test, their *Category lookup tables, and the two
    many-to-many join tables Clinic_Physician / Patient_Physician), then wires
    up the remaining foreign keys via AddField once all tables exist.

    All models share the same base columns: a client-generated 'uuid' primary
    key plus 'created'/'updated'/'synchronized' timestamps — presumably from a
    shared abstract base in sana_pchr.models (the 'abstract': False option
    suggests so); TODO confirm against the models module.
    """
    # First migration of the app: nothing to depend on.
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Clinic',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('name', models.CharField(max_length=45)),
                ('longitude', models.FloatField(null=True, blank=True)),
                ('latitude', models.FloatField(null=True, blank=True)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        # Join table for the Clinic <-> Physician many-to-many relation.
        migrations.CreateModel(
            name='Clinic_Physician',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('clinic', models.ForeignKey(db_column='clinic_uuid', to='sana_pchr.Clinic')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Device',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('name', models.CharField(max_length=45)),
                ('deviceMAC', models.CharField(max_length=17)),
                ('key', CredentialField(default=Credential.generate, editable=False)),
                ('token', models.CharField(max_length=32, default=DefaultFuncs.make_uuid)),
                ('clinic', models.ForeignKey(db_column='clinic_uuid', blank=True, null=True, to='sana_pchr.Clinic')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Encounter',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='EncounterCategory',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('displayName', models.CharField(max_length=45)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Patient',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('firstName', models.CharField(max_length=45)),
                ('lastName', models.CharField(blank=True, max_length=45)),
                ('UNHCR', models.CharField(blank=True, max_length=45)),
                ('birthYear', models.CharField(blank=True, max_length=45)),
                ('birthCity', models.CharField(blank=True, max_length=45)),
                ('picture', models.ImageField(upload_to='', blank=True)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        # Join table for the Patient <-> Physician many-to-many relation.
        migrations.CreateModel(
            name='Patient_Physician',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('patient', models.ForeignKey(db_column='patient_uuid', to='sana_pchr.Patient')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Physician',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('firstName', models.CharField(max_length=45)),
                ('lastName', models.CharField(max_length=45)),
                ('picture', models.ImageField(upload_to='', blank=True)),
                ('hashedPIN', models.CharField(max_length=128)),
                ('email', models.EmailField(blank=True, max_length=254)),
                ('phone', models.CharField(blank=True, max_length=45)),
                ('recovery_question', models.CharField(blank=True, max_length=45)),
                ('recovery_answer', models.CharField(blank=True, max_length=45)),
                ('recovery_key', DerivedCredentialField(null=True, blank=True, editable=False, max_length=128)),
                ('key', CredentialField(default=Credential.generate, editable=False)),
                ('clinics', models.ManyToManyField(to='sana_pchr.Clinic', through='sana_pchr.Clinic_Physician')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Record',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('value', models.CharField(max_length=45)),
                ('comment', models.TextField(blank=True)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='RecordCategory',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('displayName', models.CharField(max_length=45)),
                ('recordType', models.IntegerField()),
                ('resultDataType', models.CharField(blank=True, max_length=45)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Test',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('result', models.CharField(max_length=45)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='TestCategory',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('displayName', models.CharField(max_length=45)),
                ('resultType', models.IntegerField()),
                ('resultMin', models.FloatField(blank=True, null=True)),
                ('resultMax', models.FloatField(blank=True, null=True)),
                ('resultUnits', models.CharField(blank=True, max_length=45)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Visit',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='VisitCategory',
            fields=[
                ('uuid', UUIDField(primary_key=True, max_length=36, serialize=False, default=DefaultFuncs.make_uuid)),
                ('created', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('updated', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('synchronized', models.DateTimeField(default=DefaultFuncs.getNow)),
                ('displayName', models.CharField(max_length=45)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        # Remaining foreign keys are added after all tables exist, so the
        # CreateModel ordering above never references a missing target.
        migrations.AddField(
            model_name='visit',
            name='category',
            field=models.ForeignKey(db_column='category_uuid', to='sana_pchr.VisitCategory'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='visit',
            name='patient',
            field=models.ForeignKey(db_column='patient_uuid', to='sana_pchr.Patient'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='test',
            name='category',
            field=models.ForeignKey(db_column='category_uuid', to='sana_pchr.TestCategory'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='test',
            name='encounter',
            field=models.ForeignKey(db_column='encounter_uuid', to='sana_pchr.Encounter'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='record',
            name='category',
            field=models.ForeignKey(db_column='category_uuid', to='sana_pchr.RecordCategory'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='record',
            name='encounter',
            field=models.ForeignKey(db_column='encounter_uuid', to='sana_pchr.Encounter'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='patient_physician',
            name='physician',
            field=models.ForeignKey(db_column='physician_uuid', to='sana_pchr.Physician'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='patient',
            name='physicians',
            field=models.ManyToManyField(to='sana_pchr.Physician', through='sana_pchr.Patient_Physician'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='encounter',
            name='category',
            field=models.ForeignKey(db_column='category_uuid', to='sana_pchr.EncounterCategory'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='encounter',
            name='clinic',
            field=models.ForeignKey(db_column='clinic_uuid', to='sana_pchr.Clinic'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='encounter',
            name='device',
            field=models.ForeignKey(db_column='device_uuid', to='sana_pchr.Device'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='encounter',
            name='physician',
            field=models.ForeignKey(db_column='physician_uuid', to='sana_pchr.Physician'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='encounter',
            name='visit',
            field=models.ForeignKey(db_column='visit_uuid', to='sana_pchr.Visit'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='clinic_physician',
            name='physician',
            field=models.ForeignKey(db_column='physician_uuid', to='sana_pchr.Physician'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='clinic',
            name='physicians',
            field=models.ManyToManyField(to='sana_pchr.Physician', blank=True, through='sana_pchr.Clinic_Physician'),
            preserve_default=True,
        ),
    ]
| 45.420732
| 118
| 0.565445
| 1,254
| 14,898
| 6.58134
| 0.089314
| 0.131225
| 0.132316
| 0.193384
| 0.884042
| 0.861626
| 0.842966
| 0.804435
| 0.78008
| 0.710893
| 0
| 0.007895
| 0.302859
| 14,898
| 327
| 119
| 45.559633
| 0.786732
| 0.00141
| 0
| 0.741433
| 0
| 0
| 0.126521
| 0.011765
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012461
| 0
| 0.021807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6924dbec2c7b9de2f27f2e7befae5451f7eb5410
| 45,666
|
py
|
Python
|
robin_stocks/orders.py
|
shobnaren/robin_stocks
|
7f15bd3b73da3c01fe164316b259cc03ffab9ea0
|
[
"MIT"
] | 2
|
2020-03-29T07:12:33.000Z
|
2021-01-16T17:27:10.000Z
|
robin_stocks/orders.py
|
shobnaren/robin_stocks
|
7f15bd3b73da3c01fe164316b259cc03ffab9ea0
|
[
"MIT"
] | null | null | null |
robin_stocks/orders.py
|
shobnaren/robin_stocks
|
7f15bd3b73da3c01fe164316b259cc03ffab9ea0
|
[
"MIT"
] | null | null | null |
"""Contains all functions for placing orders for stocks, options, and crypto."""
from uuid import uuid4
import robin_stocks.crypto as crypto
import robin_stocks.helper as helper
import robin_stocks.options as options
import robin_stocks.profiles as profiles
import robin_stocks.stocks as stocks
import robin_stocks.urls as urls
@helper.login_required
def get_all_orders(info=None):
    """Return every order that has been processed for the account.

    :param info: Optional key name; when given, the results are data_filtered
        down to just that value for each order.
    :type info: Optional[str]
    :returns: A list of dictionaries (one per order), or a list of strings
        holding each order's *info* value when that parameter is supplied.
    """
    response = helper.request_get(urls.orders(), 'pagination')
    return helper.data_filter(response, info)
@helper.login_required
def get_all_open_orders(info=None):
    """Return only the stock orders that are currently open.

    :param info: Optional key name; when given, the results are data_filtered
        down to just that value for each order.
    :type info: Optional[str]
    :returns: A list of dictionaries (one per open order), or a list of the
        *info* values when that parameter is supplied.
    """
    every_order = helper.request_get(urls.orders(), 'pagination')
    # An order counts as open while its 'cancel' link is still present.
    still_open = [entry for entry in every_order if entry['cancel'] is not None]
    return helper.data_filter(still_open, info)
@helper.login_required
def close_all_open_option_positions(symbol=None, _type=None):
    """
    *** WIP: WORK IN PROGRESS ***
    data_filter by symbol and type
    1) cancel all OPEN option orders
    2) confirm all OPEN orders cancelled
    3) close open "short" options around MID and ASK (target price is median of ASK & MID)
    4) ensure all the SHORT options closed
    5) close open "long" options around BID and MID
    6) ensure all the LONG options closed
    :param symbol: Optional - when filtered will close all open positions
    :param _type: put or call or None==both (NOTE(review): accepted but not
        yet applied anywhere — WIP)
    :return: None (steps 3-6 are not implemented yet)
    """
    open_pos = options.get_open_option_positions()
    if symbol:
        # BUG FIX: the original wrote x.get['chain_symbol'], subscripting the
        # bound dict.get method (TypeError once the filter is consumed), and
        # printed the lazy filter object. Use a real call and materialize the
        # list so the print below shows the actual positions.
        open_pos = [pos for pos in open_pos if pos.get('chain_symbol') == symbol]
    print(open_pos)
    cancel_all_open_orders()
    print("*** DO NOT USE *** WORK IN PROGRESS ***")
@helper.login_required
def get_all_open_option_orders(info=None):
    """Return only the option orders that are currently open.

    :param info: Optional key name; when given, the results are data_filtered
        down to just that value for each order.
    :type info: Optional[str]
    :returns: A list of dictionaries (one per open option order), or a list
        of the *info* values when that parameter is supplied.
    """
    every_order = helper.request_get(urls.option_orders(), 'pagination')
    # Open option orders still expose a usable 'cancel_url'.
    still_open = [entry for entry in every_order if entry['cancel_url'] is not None]
    return helper.data_filter(still_open, info)
@helper.login_required
def get_order_info(order_id):
    """Look up a single stock order by its ID.

    :param order_id: The ID associated with the order. Can be found using
        get_all_orders() or get_all_open_orders().
    :type order_id: str
    :returns: The key/value data describing the order.
    """
    return helper.request_get(urls.orders(order_id))
@helper.login_required
def get_option_order_info(order_id):
    """Look up a single option order by its ID.

    :param order_id: The ID associated with the order. Can be found using
        get_all_open_option_orders().
    :type order_id: str
    :returns: The key/value data describing the option order.
    """
    return helper.request_get(urls.option_orders(order_id))
@helper.login_required
def find_orders(**arguments):
    """Return the orders whose fields match every keyword argument.

    :param arguments: Variable length of keyword arguments.
        EX. find_orders(symbol='FB', cancel=None, quantity=1)
    :type arguments: str
    :returns: The matching orders; all orders when no keywords are given; or
        [None] when a keyword is not a field of the order dictionaries.
    """
    orders = helper.request_get(urls.orders(), 'pagination')
    if not arguments:
        return orders
    # Normalise quantities so e.g. "1.00000" compares equal to 1.
    for entry in orders:
        entry['quantity'] = str(int(float(entry['quantity'])))
    if 'symbol' in arguments:
        # Orders reference an instrument URL rather than a ticker symbol.
        arguments['instrument'] = stocks.get_instruments_by_symbols(arguments['symbol'], info='url')[0]
        del arguments['symbol']
    if 'quantity' in arguments:
        arguments['quantity'] = str(arguments['quantity'])
    last_index = len(arguments) - 1
    matches = []
    for entry in orders:
        for position, (field, wanted) in enumerate(arguments.items()):
            if field not in entry:
                print(helper.error_argument_not_key_in_dictionary(field))
                return [None]
            if entry[field] != wanted:
                break
            if position == last_index:
                # Every criterion matched without breaking out of the loop.
                matches.append(entry)
    return matches
@helper.login_required
def cancel_all_open_option_orders():
    """Cancel every open option order.

    :returns: The list of option orders that were cancelled.
    """
    open_orders = get_all_open_option_orders()
    for entry in open_orders:
        helper.request_post(entry.get('cancel_url'))
    print('All Orders Cancelled')
    return open_orders
@helper.login_required
def cancel_all_open_orders():
    """Cancel every open stock order.

    :returns: The list of order IDs that were cancelled.
    """
    every_order = helper.request_get(urls.orders(), 'pagination')
    # Only orders that still carry a 'cancel' link can be cancelled.
    open_ids = [entry['id'] for entry in every_order if entry['cancel'] is not None]
    for order_id in open_ids:
        helper.request_post(urls.cancel(order_id))
    print('All Orders Cancelled')
    return open_ids
@helper.login_required
def cancel_order(order_id):
    """Cancel one specific stock order.

    :param order_id: The ID associated with the order. Can be found using
        get_all_orders() or get_all_open_orders().
    :type order_id: str
    :returns: The order information for the order that was cancelled.
    """
    response = helper.request_post(urls.cancel(order_id))
    if response:
        print('Order ' + order_id + ' cancelled')
    return response
@helper.login_required
def cancel_option_order(order_id):
    """Cancel one specific option order.

    :param order_id: The ID associated with the order. Can be found using
        get_all_open_option_orders().
    :type order_id: str
    :returns: The order information for the order that was cancelled.
    """
    response = helper.request_post(urls.option_cancel(order_id))
    if response:
        print('Order ' + order_id + ' cancelled')
    return response
@helper.login_required
def order_buy_market(symbol, quantity, time_in_force='gtc', extended_hours='false'):
    """Submit a market buy order for immediate execution.

    :param symbol: The stock ticker of the stock to purchase.
    :type symbol: str
    :param quantity: The number of stocks to buy.
    :type quantity: int
    :param time_in_force: How long the order stays in effect: 'gtc' = good
        until cancelled, 'gfd' = good for the day, 'ioc' = immediate or
        cancel, 'opg' = execute at opening.
    :type time_in_force: Optional[str]
    :param extended_hours: Premium users only. 'true' allows trading during
        extended hours.
    :type extended_hours: str
    :returns: A dictionary describing the submitted order (id, state, price,
        quantity, ...), or None when *symbol* is not a string.
    """
    try:
        symbol = symbol.upper().strip()
    except AttributeError as err:
        print(err)
        return None
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'instrument': stocks.get_instruments_by_symbols(symbol, info='url')[0],
        'symbol': symbol,
        # Even a market order carries a price hint: the latest traded price.
        'price': helper.round_price(stocks.get_latest_price(symbol)[0]),
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'type': 'market',
        'stop_price': None,
        'time_in_force': time_in_force,
        'trigger': 'immediate',
        'side': 'buy',
        'extended_hours': extended_hours
    }
    return helper.request_post(urls.orders(), order_payload)
@helper.login_required
def order_buy_limit(symbol, quantity, limit_price, time_in_force='gtc'):
    """Submit a limit buy order that executes once the limit price is hit.

    :param symbol: The stock ticker of the stock to purchase.
    :type symbol: str
    :param quantity: The number of stocks to buy.
    :type quantity: int
    :param limit_price: The price to trigger the buy order.
    :type limit_price: float
    :param time_in_force: How long the order stays in effect: 'gtc' = good
        until cancelled, 'gfd' = good for the day, 'ioc' = immediate or
        cancel, 'opg' = execute at opening.
    :type time_in_force: Optional[str]
    :returns: A dictionary describing the submitted order (id, state, price,
        quantity, ...), or None when an argument has the wrong type.
    """
    try:
        symbol = symbol.upper().strip()
        limit_price = helper.round_price(limit_price)
    except AttributeError as err:
        print(err)
        return None
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'instrument': stocks.get_instruments_by_symbols(symbol, info='url')[0],
        'symbol': symbol,
        'price': limit_price,
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'type': 'limit',
        'stop_price': None,
        'time_in_force': time_in_force,
        'trigger': 'immediate',
        'side': 'buy'
    }
    return helper.request_post(urls.orders(), order_payload)
@helper.login_required
def order_buy_stop_loss(symbol, quantity, stop_price, time_in_force='gtc'):
    """Submit a stop buy order that becomes a market order at the stop price.

    :param symbol: The stock ticker of the stock to purchase.
    :type symbol: str
    :param quantity: The number of stocks to buy.
    :type quantity: int
    :param stop_price: The price to trigger the market order; must be above
        the current market price.
    :type stop_price: float
    :param time_in_force: How long the order stays in effect: 'gtc' = good
        until cancelled, 'gfd' = good for the day, 'ioc' = immediate or
        cancel, 'opg' = execute at opening.
    :type time_in_force: Optional[str]
    :returns: A dictionary describing the submitted order, or None when an
        argument is invalid or the stop price is below the current price.
    """
    try:
        symbol = symbol.upper().strip()
        latest_price = helper.round_price(stocks.get_latest_price(symbol)[0])
        stop_price = helper.round_price(stop_price)
    except AttributeError as err:
        print(err)
        return None
    # A buy stop below the market would trigger instantly; reject it.
    if latest_price > stop_price:
        print('Error: stop_price must be above the current price.')
        return None
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'instrument': stocks.get_instruments_by_symbols(symbol, info='url')[0],
        'symbol': symbol,
        'price': stop_price,
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'type': 'market',
        'stop_price': stop_price,
        'time_in_force': time_in_force,
        'trigger': 'stop',
        'side': 'buy'
    }
    return helper.request_post(urls.orders(), order_payload)
@helper.login_required
def order_buy_stop_limit(symbol, quantity, limit_price, stop_price, time_in_force='gtc'):
    """Submit a stop buy order that becomes a limit order at the stop price.

    :param symbol: The stock ticker of the stock to purchase.
    :type symbol: str
    :param quantity: The number of stocks to buy.
    :type quantity: int
    :param limit_price: The limit price of the resulting limit order.
    :type limit_price: float
    :param stop_price: The price that triggers the limit order; must be above
        the current market price.
    :type stop_price: float
    :param time_in_force: How long the order stays in effect: 'gtc' = good
        until cancelled, 'gfd' = good for the day, 'ioc' = immediate or
        cancel, 'opg' = execute at opening.
    :type time_in_force: Optional[str]
    :returns: A dictionary describing the submitted order, or None when an
        argument is invalid or the stop price is below the current price.
    """
    try:
        symbol = symbol.upper().strip()
        latest_price = helper.round_price(stocks.get_latest_price(symbol)[0])
        stop_price = helper.round_price(stop_price)
        limit_price = helper.round_price(limit_price)
    except AttributeError as err:
        print(err)
        return None
    # A buy stop below the market would trigger instantly; reject it.
    if latest_price > stop_price:
        print('Error: stop_price must be above the current price.')
        return None
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'instrument': stocks.get_instruments_by_symbols(symbol, info='url')[0],
        'symbol': symbol,
        'price': limit_price,
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'type': 'limit',
        'stop_price': stop_price,
        'time_in_force': time_in_force,
        'trigger': 'stop',
        'side': 'buy'
    }
    return helper.request_post(urls.orders(), order_payload)
@helper.login_required
def order_sell_market(symbol, quantity, time_in_force='gtc', extended_hours='false'):
    """Submit a market sell order for immediate execution.

    :param symbol: The stock ticker of the stock to sell.
    :type symbol: str
    :param quantity: The number of stocks to sell.
    :type quantity: int
    :param time_in_force: How long the order stays in effect: 'gtc' = good
        until cancelled, 'gfd' = good for the day, 'ioc' = immediate or
        cancel, 'opg' = execute at opening.
    :type time_in_force: Optional[str]
    :param extended_hours: Premium users only. 'true' allows trading during
        extended hours.
    :type extended_hours: str
    :returns: A dictionary describing the submitted order (id, state, price,
        quantity, ...), or None when *symbol* is not a string.
    """
    try:
        symbol = symbol.upper().strip()
    except AttributeError as err:
        print(err)
        return None
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'instrument': stocks.get_instruments_by_symbols(symbol, info='url')[0],
        'symbol': symbol,
        # Even a market order carries a price hint: the latest traded price.
        'price': helper.round_price(stocks.get_latest_price(symbol)[0]),
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'type': 'market',
        'stop_price': None,
        'time_in_force': time_in_force,
        'trigger': 'immediate',
        'side': 'sell',
        'extended_hours': extended_hours
    }
    return helper.request_post(urls.orders(), order_payload)
@helper.login_required
def order_sell_limit(symbol, quantity, limit_price, time_in_force='gtc'):
    """Submit a limit sell order that executes once the limit price is hit.

    :param symbol: The stock ticker of the stock to sell.
    :type symbol: str
    :param quantity: The number of stocks to sell.
    :type quantity: int
    :param limit_price: The price to trigger the sell order.
    :type limit_price: float
    :param time_in_force: How long the order stays in effect: 'gtc' = good
        until cancelled, 'gfd' = good for the day, 'ioc' = immediate or
        cancel, 'opg' = execute at opening.
    :type time_in_force: Optional[str]
    :returns: A dictionary describing the submitted order (id, state, price,
        quantity, ...), or None when an argument has the wrong type.
    """
    try:
        symbol = symbol.upper().strip()
        limit_price = helper.round_price(limit_price)
    except AttributeError as err:
        print(err)
        return None
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'instrument': stocks.get_instruments_by_symbols(symbol, info='url')[0],
        'symbol': symbol,
        'price': limit_price,
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'type': 'limit',
        'stop_price': None,
        'time_in_force': time_in_force,
        'trigger': 'immediate',
        'side': 'sell'
    }
    return helper.request_post(urls.orders(), order_payload)
@helper.login_required
def order_sell_stop_loss(symbol, quantity, stop_price, time_in_force='gtc'):
    """Submit a stop sell order that becomes a market order at the stop price.

    :param symbol: The stock ticker of the stock to sell.
    :type symbol: str
    :param quantity: The number of stocks to sell.
    :type quantity: int
    :param stop_price: The price to trigger the market order; must be below
        the current market price.
    :type stop_price: float
    :param time_in_force: How long the order stays in effect: 'gtc' = good
        until cancelled, 'gfd' = good for the day, 'ioc' = immediate or
        cancel, 'opg' = execute at opening.
    :type time_in_force: Optional[str]
    :returns: A dictionary describing the submitted order, or None when an
        argument is invalid or the stop price is above the current price.
    """
    try:
        symbol = symbol.upper().strip()
        latest_price = helper.round_price(stocks.get_latest_price(symbol)[0])
        stop_price = helper.round_price(stop_price)
    except AttributeError as err:
        print(err)
        return None
    # A sell stop above the market would trigger instantly; reject it.
    if latest_price < stop_price:
        print('Error: stop_price must be below the current price.')
        return None
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'instrument': stocks.get_instruments_by_symbols(symbol, info='url')[0],
        'symbol': symbol,
        'price': stop_price,
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'type': 'market',
        'stop_price': stop_price,
        'time_in_force': time_in_force,
        'trigger': 'stop',
        'side': 'sell'
    }
    return helper.request_post(urls.orders(), order_payload)
@helper.login_required
def order_sell_stop_limit(symbol, quantity, limit_price, stop_price, time_in_force='gtc'):
    """Submit a stop sell order that becomes a limit order at the stop price.

    :param symbol: The stock ticker of the stock to sell.
    :type symbol: str
    :param quantity: The number of stocks to sell.
    :type quantity: int
    :param limit_price: The limit price of the resulting limit order.
    :type limit_price: float
    :param stop_price: The price that triggers the limit order; must be below
        the current market price.
    :type stop_price: float
    :param time_in_force: How long the order stays in effect: 'gtc' = good
        until cancelled, 'gfd' = good for the day, 'ioc' = immediate or
        cancel, 'opg' = execute at opening.
    :type time_in_force: Optional[str]
    :returns: A dictionary describing the submitted order, or None when an
        argument is invalid or the stop price is above the current price.
    """
    try:
        symbol = symbol.upper().strip()
        latest_price = helper.round_price(stocks.get_latest_price(symbol)[0])
        stop_price = helper.round_price(stop_price)
        limit_price = helper.round_price(limit_price)
    except AttributeError as err:
        print(err)
        return None
    # A sell stop above the market would trigger instantly; reject it.
    if latest_price < stop_price:
        print('Error: stop_price must be below the current price.')
        return None
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'instrument': stocks.get_instruments_by_symbols(symbol, info='url')[0],
        'symbol': symbol,
        'price': limit_price,
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'type': 'limit',
        'stop_price': stop_price,
        'time_in_force': time_in_force,
        'trigger': 'stop',
        'side': 'sell'
    }
    return helper.request_post(urls.orders(), order_payload)
@helper.login_required
def order(symbol, quantity, order_type, limit_price, stop_price, trigger, side, time_in_force, extended_hours):
    """Generic order entry point; every parameter must be supplied.

    :param symbol: The stock ticker of the stock to trade.
    :type symbol: str
    :param quantity: The number of stocks to trade.
    :type quantity: int
    :param order_type: Either 'market' or 'limit'.
    :type order_type: str
    :param limit_price: The price to trigger the market order.
    :type limit_price: float
    :param stop_price: The price to trigger the limit or market order.
    :type stop_price: float
    :param trigger: Either 'immediate' or 'stop'.
    :type trigger: str
    :param side: Either 'buy' or 'sell'.
    :type side: str
    :param time_in_force: How long the order stays in effect: 'gtc' = good
        until cancelled, 'gfd' = good for the day, 'ioc' = immediate or
        cancel, 'opg' = execute at opening.
    :type time_in_force: str
    :param extended_hours: Premium users only. 'true' allows trading during
        extended hours.
    :type extended_hours: str
    :returns: A dictionary describing the submitted order (id, state, price,
        quantity, ...), or None when an argument has the wrong type.
    """
    try:
        symbol = symbol.upper().strip()
        stop_price = helper.round_price(stop_price)
        limit_price = helper.round_price(limit_price)
    except AttributeError as err:
        print(err)
        return None
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'instrument': stocks.get_instruments_by_symbols(symbol, info='url')[0],
        'symbol': symbol,
        'price': limit_price,
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'type': order_type,
        'stop_price': stop_price,
        'time_in_force': time_in_force,
        'trigger': trigger,
        'side': side,
        'extended_hours': extended_hours
    }
    return helper.request_post(urls.orders(), order_payload)
@helper.login_required
def order_option_credit_spread(price, symbol, quantity, spread, time_in_force='gfd'):
    """Submits a limit order for an option credit spread.

    :param price: The limit price to trigger a sell of the option.
    :type price: float
    :param symbol: The stock ticker of the stock to trade.
    :type symbol: str
    :param quantity: The number of options to sell.
    :type quantity: int
    :param spread: A list of leg dictionaries, each with the following keys: \n
        - expiration_date: The expiration date of the option in 'YYYY-MM-DD' format.\n
        - strike: The strike price of the option.\n
        - option_type: This should be 'call' or 'put'
    :type spread: list
    :param time_in_force: Changes how long the order will be in effect for.
        'gtc' = good until cancelled. \
        'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the selling of options, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity.

    """
    # Bug fix: the original discarded the response from order_option_spread,
    # so callers always got None despite the documented return value.
    return order_option_spread("credit", price, symbol, quantity, spread, time_in_force)
@helper.login_required
def order_option_debit_spread(price, symbol, quantity, spread, time_in_force='gfd'):
    """Submits a limit order for an option debit spread.

    :param price: The limit price to trigger a buy of the option.
    :type price: float
    :param symbol: The stock ticker of the stock to trade.
    :type symbol: str
    :param quantity: The number of options to buy.
    :type quantity: int
    :param spread: A list of leg dictionaries, each with the following keys: \n
        - expiration_date: The expiration date of the option in 'YYYY-MM-DD' format.\n
        - strike: The strike price of the option.\n
        - option_type: This should be 'call' or 'put'
    :type spread: list
    :param time_in_force: Changes how long the order will be in effect for.
        'gtc' = good until cancelled. \
        'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the buying of options, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity.

    """
    # Bug fixes: the docstring previously said "credit spread" for this debit
    # function, and the response from order_option_spread was discarded.
    return order_option_spread("debit", price, symbol, quantity, spread, time_in_force)
@helper.login_required
def order_option_spread(direction, price, symbol, quantity, spread, time_in_force='gfd'):
    """Submits a limit order for an option spread. i.e. place a debit / credit spread

    :param direction: credit or debit spread
    :type direction: str
    :param price: The limit price to trigger a trade of the option.
    :type price: float
    :param symbol: The stock ticker of the stock to trade.
    :type symbol: str
    :param quantity: The number of options to trade.
    :type quantity: int
    :param spread: A list of leg dictionaries, each with the following keys: \n
        - expiration_date: The expiration date of the option in 'YYYY-MM-DD' format.\n
        - strike: The strike price of the option.\n
        - option_type: This should be 'call' or 'put'\n
        - effect: This should be 'open' or 'close'\n
        - action: This should be 'buy' or 'sell'
    :type spread: list
    :param time_in_force: Changes how long the order will be in effect for.
        'gtc' = good until cancelled. \
        'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the trading of options, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity.

    """
    # Fixes vs. the original: removed a dead duplicate docstring-string that
    # sat after the real docstring as a no-op statement, repaired the
    # truncated ``Optional[str`` type line, and documented the 'effect' and
    # 'action' keys that the loop below actually reads from each leg.
    try:
        symbol = symbol.upper().strip()
    except AttributeError as message:
        print(message)
        return None
    legs = []
    for each in spread:
        # Legs that close an existing position must reference the id of the
        # currently held contract; opening legs are resolved from the chain.
        if each['effect'] == 'close':
            option_id = options.id_of_options_to_close(symbol,
                                                       each['expiration_date'],
                                                       each['strike'],
                                                       each['option_type'])
        else:
            option_id = helper.id_for_option(symbol,
                                             each['expiration_date'],
                                             each['strike'],
                                             each['option_type'])
        legs.append({'position_effect': each['effect'],
                     'side': each['action'],
                     'ratio_quantity': 1,
                     'option': urls.option_instruments(option_id)})
    payload = {
        'account': profiles.load_account_profile(info='url'),
        'direction': direction,
        'time_in_force': time_in_force,
        'legs': legs,
        'type': 'limit',
        'trigger': 'immediate',
        'price': price,
        'quantity': quantity,
        'override_day_trade_checks': False,
        'override_dtbp_checks': False,
        'ref_id': str(uuid4()),
    }
    url = urls.option_orders()
    data = helper.request_post(url, payload, json=True)
    return data
@helper.login_required
def order_option_buy_to_open(price, symbol, quantity, expiration_date, strike, option_type='both', time_in_force='gfd'):
    """Alias for :func:`order_buy_option_limit`; opens a long option position.

    All parameters are forwarded unchanged.

    :returns: The dictionary returned by :func:`order_buy_option_limit`.

    """
    # Bug fix: the original discarded the delegate's response, so callers
    # always received None instead of the order details.
    return order_buy_option_limit(price, symbol, quantity, expiration_date, strike, option_type, time_in_force)
@helper.login_required
def order_buy_option_limit(price, symbol, quantity, expiration_date, strike, option_type='both', time_in_force='gfd'):
    """Submits a limit order for an option. i.e. place a long call or a long put.

    :param price: The limit price to trigger a buy of the option.
    :type price: float
    :param symbol: The stock ticker of the stock to trade.
    :type symbol: str
    :param quantity: The number of options to buy.
    :type quantity: int
    :param expiration_date: The expiration date of the option in 'YYYY-MM-DD' format.
    :type expiration_date: str
    :param strike: The strike price of the option.
    :type strike: float
    :param option_type: This should be 'call' or 'put'
    :type option_type: str
    :param time_in_force: Changes how long the order will be in effect for. 'gtc' = good until cancelled. \
    'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the buying of options, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity.

    """
    try:
        symbol = symbol.upper().strip()
    except AttributeError as message:
        print(message)
        return None

    option_id = helper.id_for_option(symbol, expiration_date, str(strike), option_type)
    # A single buy-to-open leg for the resolved option instrument.
    leg = {
        'position_effect': 'open',
        'side': 'buy',
        'ratio_quantity': 1,
        'option': urls.option_instruments(option_id),
    }
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'direction': 'debit',
        'time_in_force': time_in_force,
        'legs': [leg],
        'type': 'limit',
        'trigger': 'immediate',
        'price': price,
        'quantity': quantity,
        'override_day_trade_checks': False,
        'override_dtbp_checks': False,
        'ref_id': str(uuid4()),
    }
    return helper.request_post(urls.option_orders(), order_payload, json=True)
@helper.login_required
def order_option_sell_to_close(price, symbol, quantity, expiration_date, strike, option_type='both',
                               time_in_force='gfd'):
    """Close an existing long option position with a sell-to-close limit order.

    Looks up a currently held long option matching the supplied criteria and,
    if one is found, submits a credit limit order against its id. Returns
    None when no matching position exists.

    :param price: The limit price to trigger a sell of the option.
    :param symbol: The stock ticker of the stock to trade.
    :param quantity: The number of options to sell.
    :param expiration_date: The expiration date of the option in 'YYYY-MM-DD' format.
    :param strike: The strike price of the option.
    :param option_type: This should be 'call' or 'put'.
    :param time_in_force: 'gtc', 'gfd', 'ioc', or 'opg'.
    :return: The order response dictionary, or None if no position matched.
    """
    matched_id = options.id_of_options_to_close(symbol, expiration_date, strike, option_type,
                                                count=quantity, _type='long')
    if not matched_id:
        return None
    return order_option_by_id(matched_id, price, quantity, direction='credit',
                              effect='close', side='sell', time_in_force=time_in_force)
@helper.login_required
def order_option_by_id(option_id, price, quantity, direction='credit', effect='close', side='sell',
                       time_in_force='gfd'):
    """Submits a single-leg limit option order for an already-known option id.

    :param option_id: The Robinhood id of the option instrument.
    :type option_id: str
    :param price: The limit price for the order.
    :type price: float
    :param quantity: The number of contracts to trade.
    :type quantity: int
    :param direction: Either 'credit' or 'debit'.
    :type direction: Optional[str]
    :param effect: Position effect, either 'open' or 'close'.
    :type effect: Optional[str]
    :param side: Either 'buy' or 'sell'.
    :type side: Optional[str]
    :param time_in_force: Changes how long the order will be in effect for. 'gtc' = good until cancelled. \
    'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary with the order details as returned by the API.

    """
    payload = {
        'account': profiles.load_account_profile(info='url'),
        'direction': direction,
        'time_in_force': time_in_force,
        'legs': [
            {'position_effect': effect, 'side': side, 'ratio_quantity': 1,
             'option': urls.option_instruments(option_id)},
        ],
        'type': 'limit',
        'trigger': 'immediate',
        'price': price,
        'quantity': quantity,
        'override_day_trade_checks': False,
        'override_dtbp_checks': False,
        'ref_id': str(uuid4()),
    }
    url = urls.option_orders()
    # Bug fix: removed a leftover debug print() of the raw response that
    # polluted stdout on every call.
    data = helper.request_post(url, payload, json=True)
    return data
@helper.login_required
def order_sell_option_limit(price, symbol, quantity, expiration_date, strike, option_type='both', time_in_force='gfd'):
    """Submits a limit order for an option. i.e. place a short call or a short put.

    :param price: The limit price to trigger a sell of the option.
    :type price: float
    :param symbol: The stock ticker of the stock to trade.
    :type symbol: str
    :param quantity: The number of options to sell.
    :type quantity: int
    :param expiration_date: The expiration date of the option in 'YYYY-MM-DD' format.
    :type expiration_date: str
    :param strike: The strike price of the option.
    :type strike: float
    :param option_type: This should be 'call' or 'put'
    :type option_type: str
    :param time_in_force: Changes how long the order will be in effect for. 'gtc' = good until cancelled. \
    'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the selling of options, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity.
    """
    # Uppercase/strip the ticker; non-string input is reported and aborts.
    try:
        symbol = symbol.upper().strip()
    except AttributeError as message:
        print(message)
        return None
    option_id = helper.id_for_option(symbol, expiration_date, str(strike), option_type)
    # NOTE(review): position_effect is 'close', so this sells to CLOSE an
    # existing long position rather than opening a short one as the summary
    # line suggests — confirm intent (compare order_option_sell_to_open,
    # which uses position_effect 'open').
    payload = {
        'account': profiles.load_account_profile(info='url'),
        'direction': 'credit',
        'time_in_force': time_in_force,
        'legs': [
            {'position_effect': 'close', 'side': 'sell', 'ratio_quantity': 1,
             'option': urls.option_instruments(option_id)},
        ],
        'type': 'limit',
        'trigger': 'immediate',
        'price': price,
        'quantity': quantity,
        'override_day_trade_checks': False,
        'override_dtbp_checks': False,
        'ref_id': str(uuid4()),
    }
    url = urls.option_orders()
    data = helper.request_post(url, payload, json=True)
    return data
@helper.login_required
def order_option_buy_to_close(price, symbol, quantity, expiration_date, strike, option_type='both',
                              time_in_force='gfd'):
    """Submits a limit order to buy back (close) an existing short option position.

    :param price: The limit price to trigger a buy of the option.
    :type price: float
    :param symbol: The stock ticker of the stock to trade.
    :type symbol: str
    :param quantity: The number of options to buy.
    :type quantity: int
    :param expiration_date: The expiration date of the option in 'YYYY-MM-DD' format.
    :type expiration_date: str
    :param strike: The strike price of the option.
    :type strike: float
    :param option_type: This should be 'call' or 'put'
    :type option_type: str
    :param time_in_force: Changes how long the order will be in effect for. 'gtc' = good until cancelled. \
    'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the buying of options, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity. Returns None when no matching short position is held.

    """
    _id = options.id_of_options_to_close(symbol, expiration_date, strike, option_type, count=quantity, _type='short')
    if _id:
        # Bug fix: the original dropped the response from order_option_by_id
        # (the sibling order_option_sell_to_close returns it), so callers
        # always received None despite the documented return value.
        return order_option_by_id(_id, price, quantity, direction='debit', effect='close', side='buy',
                                  time_in_force=time_in_force)
@helper.login_required
def order_option_sell_to_open(price, symbol, quantity, expiration_date, strike, option_type='both',
                              time_in_force='gfd'):
    """Submits a limit order for an option. i.e. place a short call or a short put.

    :param price: The limit price to trigger a sell of the option.
    :type price: float
    :param symbol: The stock ticker of the stock to trade.
    :type symbol: str
    :param quantity: The number of options to sell.
    :type quantity: int
    :param expiration_date: The expiration date of the option in 'YYYY-MM-DD' format.
    :type expiration_date: str
    :param strike: The strike price of the option.
    :type strike: float
    :param option_type: This should be 'call' or 'put'
    :type option_type: str
    :param time_in_force: Changes how long the order will be in effect for. 'gtc' = good until cancelled. \
    'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the selling of options, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity.

    """
    try:
        symbol = symbol.upper().strip()
    except AttributeError as message:
        print(message)
        return None

    option_id = helper.id_for_option(symbol, expiration_date, str(strike), option_type)
    # A single sell-to-open leg for the resolved option instrument.
    leg = {
        'position_effect': 'open',
        'side': 'sell',
        'ratio_quantity': 1,
        'option': urls.option_instruments(option_id),
    }
    order_payload = {
        'account': profiles.load_account_profile(info='url'),
        'direction': 'credit',
        'time_in_force': time_in_force,
        'legs': [leg],
        'type': 'limit',
        'trigger': 'immediate',
        'price': price,
        'quantity': quantity,
        'override_day_trade_checks': False,
        'override_dtbp_checks': False,
        'ref_id': str(uuid4()),
    }
    return helper.request_post(urls.option_orders(), order_payload, json=True)
@helper.login_required
def order_buy_crypto_by_price(symbol, amount_in_dollars, price_type='ask_price', time_in_force='gtc'):
    """Submits a market order for a crypto by specifying the amount in dollars that you want to trade.
    Good for share fractions up to 8 decimal places.

    :param symbol: The crypto ticker of the crypto to trade.
    :type symbol: str
    :param amount_in_dollars: The amount in dollars of the crypto you want to buy.
    :type amount_in_dollars: float
    :param price_type: The type of price to get. Can be 'ask_price', 'bid_price', or 'mark_price'
    :type price_type: str
    :param time_in_force: Changes how long the order will be in effect for. 'gtc' = good until cancelled. \
    'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the buying of crypto, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity.

    """
    try:
        symbol = symbol.upper().strip()
    except AttributeError as message:
        print(message)
        return None
    crypto_info = crypto.get_crypto_info(symbol)
    price = helper.round_price(crypto.get_crypto_quote_from_id(crypto_info['id'], info=price_type))
    # Turn the dollar amount into a fractional share count (8 decimal places).
    # Consistency fix: wrap price in float() exactly as the sibling
    # order_sell_crypto_by_price does, so a string-typed quote cannot raise
    # a TypeError here that the sell path would have tolerated.
    try:
        shares = round(amount_in_dollars / float(price), 8)
    except Exception as e:
        # On any failure the original fell back to 0 shares and still
        # submitted the order; behavior preserved.
        print(repr(e))
        shares = 0
    payload = {
        # NOTE(review): 'mimeType' is absent from every other crypto order
        # payload in this module — confirm whether the API actually needs it.
        'mimeType': 'application/json',
        'account_id': crypto.load_crypto_profile(info="id"),
        'currency_pair_id': crypto_info['id'],
        'price': price,
        'quantity': shares,
        'ref_id': str(uuid4()),
        'side': 'buy',
        'time_in_force': time_in_force,
        'type': 'market'
    }
    url = urls.order_crypto()
    data = helper.request_post(url, payload, json=True)
    return data
@helper.login_required
def order_buy_crypto_by_quantity(symbol, quantity, price_type='ask_price', time_in_force='gtc'):
    """Submits a market order for a crypto by specifying the decimal amount of shares to buy.
    Good for share fractions up to 8 decimal places.

    :param symbol: The crypto ticker of the crypto to trade.
    :type symbol: str
    :param quantity: The decimal amount of shares to buy.
    :type quantity: float
    :param price_type: The type of price to get. Can be 'ask_price', 'bid_price', or 'mark_price'
    :type price_type: str
    :param time_in_force: Changes how long the order will be in effect for. 'gtc' = good until cancelled. \
    'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the buying of crypto, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity.

    """
    crypto_info = crypto.get_crypto_info(symbol)
    quote = crypto.get_crypto_quote_from_id(crypto_info['id'], info=price_type)
    current_price = helper.round_price(quote)
    buy_payload = {
        'account_id': crypto.load_crypto_profile(info="id"),
        'currency_pair_id': crypto_info['id'],
        'price': current_price,
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'side': 'buy',
        'time_in_force': time_in_force,
        'type': 'market'
    }
    return helper.request_post(urls.order_crypto(), buy_payload, json=True)
@helper.login_required
def order_sell_crypto_by_price(symbol, amount_in_dollars, price_type='ask_price', time_in_force='gtc'):
    """Submits a market order for a crypto by specifying the amount in dollars that you want to trade.
    Good for share fractions up to 8 decimal places.

    :param symbol: The crypto ticker of the crypto to trade.
    :type symbol: str
    :param amount_in_dollars: The amount in dollars of the crypto you want to sell.
    :type amount_in_dollars: float
    :param price_type: The type of price to get. Can be 'ask_price', 'bid_price', or 'mark_price'
    :type price_type: str
    :param time_in_force: Changes how long the order will be in effect for. 'gtc' = good until cancelled. \
    'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the selling of crypto, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity.

    """
    try:
        symbol = symbol.upper().strip()
    except AttributeError as message:
        print(message)
        return None

    crypto_info = crypto.get_crypto_info(symbol)
    quote = crypto.get_crypto_quote_from_id(crypto_info['id'], info=price_type)
    current_price = helper.round_price(quote)
    # Convert the dollar amount into a fractional share count (8 dp); on any
    # failure fall back to 0 shares, matching the original behavior.
    try:
        shares = round(amount_in_dollars / float(current_price), 8)
    except Exception as e:
        print(repr(e))
        shares = 0
    sell_payload = {
        'account_id': crypto.load_crypto_profile(info="id"),
        'currency_pair_id': crypto_info['id'],
        'price': current_price,
        'quantity': shares,
        'ref_id': str(uuid4()),
        'side': 'sell',
        'time_in_force': time_in_force,
        'type': 'market'
    }
    return helper.request_post(urls.order_crypto(), sell_payload, json=True)
@helper.login_required
def order_sell_crypto_by_quantity(symbol, quantity, price_type='ask_price', time_in_force='gtc'):
    """Submits a market order for a crypto by specifying the decimal amount of shares to sell.
    Good for share fractions up to 8 decimal places.

    :param symbol: The crypto ticker of the crypto to trade.
    :type symbol: str
    :param quantity: The decimal amount of shares to sell.
    :type quantity: float
    :param price_type: The type of price to get. Can be 'ask_price', 'bid_price', or 'mark_price'
    :type price_type: str
    :param time_in_force: Changes how long the order will be in effect for. 'gtc' = good until cancelled. \
    'gfd' = good for the day. 'ioc' = immediate or cancel. 'opg' execute at opening.
    :type time_in_force: Optional[str]
    :returns: Dictionary that contains information regarding the selling of crypto, \
    such as the order id, the state of order (queued, confirmed, filled, failed, canceled, etc.), \
    the price, and the quantity.
    """
    crypto_info = crypto.get_crypto_info(symbol)
    # Current quote is submitted with the market order payload.
    price = helper.round_price(crypto.get_crypto_quote_from_id(crypto_info['id'], info=price_type))
    payload = {
        'account_id': crypto.load_crypto_profile(info="id"),
        'currency_pair_id': crypto_info['id'],
        'price': price,
        'quantity': quantity,
        'ref_id': str(uuid4()),
        'side': 'sell',
        'time_in_force': time_in_force,
        'type': 'market'
    }
    url = urls.order_crypto()
    data = helper.request_post(url, payload, json=True)
    return data
| 36.827419
| 133
| 0.663251
| 6,238
| 45,666
| 4.704392
| 0.045367
| 0.022081
| 0.040483
| 0.025489
| 0.919648
| 0.913958
| 0.906665
| 0.900532
| 0.893137
| 0.884959
| 0
| 0.001636
| 0.23696
| 45,666
| 1,239
| 134
| 36.857143
| 0.840551
| 0.464635
| 0
| 0.783418
| 0
| 0
| 0.129775
| 0.005565
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05753
| false
| 0.001692
| 0.011844
| 0
| 0.153976
| 0.049069
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
692be55bf70b16b764b85c00c2dd64b4fa5d9235
| 22,562
|
py
|
Python
|
tests/unit/test_villages.py
|
didadadida93/tkpy
|
c8cc41e2115cca7d975a68e418c462a4da3232a9
|
[
"MIT"
] | 1
|
2019-11-12T12:56:54.000Z
|
2019-11-12T12:56:54.000Z
|
tests/unit/test_villages.py
|
didadadida93/tkpy
|
c8cc41e2115cca7d975a68e418c462a4da3232a9
|
[
"MIT"
] | 1
|
2019-02-16T17:06:04.000Z
|
2019-02-16T20:53:05.000Z
|
tests/unit/test_villages.py
|
didadadida93/tkpy
|
c8cc41e2115cca7d975a68e418c462a4da3232a9
|
[
"MIT"
] | 5
|
2018-12-23T14:52:32.000Z
|
2021-07-17T06:11:15.000Z
|
from tkpy.villages import Villages
from tkpy.exception import QueueFull
from tkpy.exception import VillageNotFound
from tkpy.exception import BuildingSlotFull
from tkpy.exception import BuildingAtMaxLevel
from tkpy.exception import WarehouseNotEnough
from tkpy.exception import FailedConstructBuilding
from tkpy.enums.tribe import Tribe
from tkpy.enums.building import BuildingType
import unittest
import requests_mock
import pickle
import json
class TestVillages(unittest.TestCase):
    """Unit tests for the Villages mapping container."""

    def testing_villages(self):
        """Villages supports lookup, iteration, dict views and capital lookup."""
        with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
            driver = pickle.load(f)
        with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
            villages_raw = json.load(f)
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST", "https://com1.kingdoms.com/api/", json=villages_raw
            )
            villages = Villages(driver)
            villages.pull()
            self.assertEqual(villages["001"].name, "001")
            self.assertEqual(len([entry for entry in villages]), 2)
            self.assertEqual(len(list(villages.keys())), 2)
            self.assertEqual(len(list(villages.items())), 2)
            self.assertEqual(len(list(villages.values())), 2)
            with self.assertRaises(VillageNotFound):
                villages["village not found"]
            capital = villages.get_capital_village()
            self.assertEqual(capital.name, "001")
class TestVillage(unittest.TestCase):
def testing_property_village(self):
g = None
v = None
villages_raw = {}
village_raw = {}
with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
g = pickle.load(f)
with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
villages_raw = json.load(f)
with open("./tests/unit/fixtures/village_raw.json", "r") as f:
village_raw = json.load(f)
with requests_mock.mock() as mock:
mock.register_uri(
"POST", "https://com1.kingdoms.com/api/", json=villages_raw
)
v = Villages(g)
v.pull()
v1 = v["001"]
self.assertEqual(v1["name"], "001")
self.assertEqual(v1.id, 538230818)
self.assertEqual(v1.name, "001")
self.assertEqual(v1.coordinate, (-24, -13))
self.assertTrue(v1.is_main_village)
with self.assertRaises(KeyError):
v1["key error"]
with requests_mock.mock() as mock:
mock.register_uri(
"POST", "https://com1.kingdoms.com/api/", json=village_raw
)
v1.pull()
self.assertEqual(v1["name"], "001")
self.assertEqual(v1.id, 536461288)
self.assertEqual(v1.name, "001")
self.assertEqual(v1.coordinate, (-24, -13))
self.assertTrue(v1.is_main_village)
with self.assertRaises(KeyError):
v1["key error"]
def testing_village_send_attack(self):
g = None
v = None
villages_raw = {}
raw_rally_point = {}
raw_check_target = {}
send_troops_raw = {}
with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
g = pickle.load(f)
with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
villages_raw = json.load(f)
with open("./tests/unit/fixtures/raw_rally_point.json", "r") as f:
raw_rally_point = json.load(f)
with open("./tests/unit/fixtures/raw_check_target.json", "r") as f:
raw_check_target = json.load(f)
with open("./tests/unit/fixtures/send_troops_raw.json", "r") as f:
send_troops_raw = json.load(f)
with requests_mock.mock() as mock:
mock.register_uri(
"POST", "https://com1.kingdoms.com/api/", json=villages_raw
)
v = Villages(g)
v.pull()
with requests_mock.mock() as mock:
mock.register_uri(
"POST",
"https://com1.kingdoms.com/api/",
[
{"json": raw_rally_point},
{"json": raw_check_target},
{"json": send_troops_raw},
],
)
r = v["001"].send_attack(1, 1)
self.assertEqual(r, send_troops_raw)
def testing_village_send_raid(self):
g = None
v = None
villages_raw = {}
raw_rally_point = {}
raw_check_target = {}
send_troops_raw = {}
with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
g = pickle.load(f)
with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
villages_raw = json.load(f)
with open("./tests/unit/fixtures/raw_rally_point.json", "r") as f:
raw_rally_point = json.load(f)
with open("./tests/unit/fixtures/raw_check_target.json", "r") as f:
raw_check_target = json.load(f)
with open("./tests/unit/fixtures/send_troops_raw.json", "r") as f:
send_troops_raw = json.load(f)
with requests_mock.mock() as mock:
mock.register_uri(
"POST", "https://com1.kingdoms.com/api/", json=villages_raw
)
v = Villages(g)
v.pull()
with requests_mock.mock() as mock:
mock.register_uri(
"POST",
"https://com1.kingdoms.com/api/",
[
{"json": raw_rally_point},
{"json": raw_check_target},
{"json": send_troops_raw},
],
)
r = v["001"].send_raid(1, 1)
self.assertEqual(r, send_troops_raw)
def testing_village_send_defend(self):
g = None
v = None
villages_raw = {}
raw_rally_point = {}
raw_check_target = {}
send_troops_raw = {}
with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
g = pickle.load(f)
with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
villages_raw = json.load(f)
with open("./tests/unit/fixtures/raw_rally_point.json", "r") as f:
raw_rally_point = json.load(f)
with open("./tests/unit/fixtures/raw_check_target.json", "r") as f:
raw_check_target = json.load(f)
with open("./tests/unit/fixtures/send_troops_raw.json", "r") as f:
send_troops_raw = json.load(f)
with requests_mock.mock() as mock:
mock.register_uri(
"POST", "https://com1.kingdoms.com/api/", json=villages_raw
)
v = Villages(g)
v.pull()
with requests_mock.mock() as mock:
mock.register_uri(
"POST",
"https://com1.kingdoms.com/api/",
[
{"json": raw_rally_point},
{"json": raw_check_target},
{"json": send_troops_raw},
],
)
r = v["001"].send_defend(1, 1)
self.assertEqual(r, send_troops_raw)
def testing_village_send_spy(self):
g = None
v = None
villages_raw = {}
raw_rally_point = {}
raw_check_target = {}
send_troops_raw = {}
with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
g = pickle.load(f)
with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
villages_raw = json.load(f)
with open("./tests/unit/fixtures/raw_rally_point.json", "r") as f:
raw_rally_point = json.load(f)
with open("./tests/unit/fixtures/raw_check_target.json", "r") as f:
raw_check_target = json.load(f)
with open("./tests/unit/fixtures/send_troops_raw.json", "r") as f:
send_troops_raw = json.load(f)
with requests_mock.mock() as mock:
mock.register_uri(
"POST", "https://com1.kingdoms.com/api/", json=villages_raw
)
v = Villages(g)
v.pull()
with requests_mock.mock() as mock:
mock.register_uri(
"POST",
"https://com1.kingdoms.com/api/",
[
{"json": raw_rally_point},
{"json": raw_check_target},
{"json": send_troops_raw},
],
)
r = v["001"].send_spy(1, 1)
self.assertEqual(r, send_troops_raw)
def testing_village_send_siege(self):
g = None
v = None
villages_raw = {}
raw_rally_point = {}
raw_check_target = {}
send_troops_raw = {}
with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
g = pickle.load(f)
with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
villages_raw = json.load(f)
with open("./tests/unit/fixtures/raw_rally_point.json", "r") as f:
raw_rally_point = json.load(f)
with open("./tests/unit/fixtures/raw_check_target.json", "r") as f:
raw_check_target = json.load(f)
with open("./tests/unit/fixtures/send_troops_raw.json", "r") as f:
send_troops_raw = json.load(f)
with requests_mock.mock() as mock:
mock.register_uri(
"POST", "https://com1.kingdoms.com/api/", json=villages_raw
)
v = Villages(g)
v.pull()
with requests_mock.mock() as mock:
mock.register_uri(
"POST",
"https://com1.kingdoms.com/api/",
[
{"json": raw_rally_point},
{"json": raw_check_target},
{"json": send_troops_raw},
],
)
r = v["001"].send_siege(1, 1)
self.assertEqual(r, send_troops_raw)
def testing_village_send_farmlist(self):
g = None
v = None
villages_raw = {}
with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
g = pickle.load(f)
with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
villages_raw = json.load(f)
with requests_mock.mock() as mock:
mock.register_uri(
"POST",
"https://com1.kingdoms.com/api/",
[{"json": villages_raw}, {"json": {"mock": "mocked"}}],
)
v = Villages(g)
v.pull()
r = v["001"].send_farmlist([123])
self.assertEqual(r, {"mock": "mocked"})
    def testing_village_upgrade(self):
        """Exercise Village.upgrade across success, warehouse, queue and
        max-level paths.

        The test mutates the shared fixture dicts between mocked call
        sequences, so the scenarios below are order-dependent and must not
        be reordered.
        """
        g = None
        v = None
        villages_raw = {}
        village_raw = {}
        buildings_raw = {}
        building_queue_raw = {}
        # Load the pickled driver and the raw API response fixtures.
        with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
            g = pickle.load(f)
        with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
            villages_raw = json.load(f)
        with open("./tests/unit/fixtures/village_raw.json", "r") as f:
            village_raw = json.load(f)
        with open("./tests/unit/fixtures/buildings_raw.json", "r") as f:
            buildings_raw = json.load(f)
        with open("./tests/unit/fixtures/building_queue_raw.json", "r") as f:
            building_queue_raw = json.load(f)
        # Populate the Villages container from the mocked villages response.
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST", "https://com1.kingdoms.com/api/", json=villages_raw
            )
            v = Villages(g)
            v.pull()
        # Happy path: each upgrade consumes village, buildings and queue
        # responses in order, then returns the final (mocked) API response.
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            r = v["001"].upgrade(BuildingType.MAIN_BUILDING)
            self.assertEqual(r, {"mock": "mocked"})
        # Zero out the upgrade costs of building type "1" (woodcutter) so the
        # warehouse capacity check passes on the next upgrade.
        for x in buildings_raw["cache"][0]["data"]["cache"]:
            if x["data"]["buildingType"] == "1":
                x["data"]["upgradeCosts"] = {"1": 0, "2": 0, "3": 0, "4": 0}
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            r = v["001"].upgrade(BuildingType.WOODCUTTER)
            self.assertEqual(r, {"mock": "mocked"})
        # Inflate one resource cost past any warehouse capacity so the
        # upgrade raises WarehouseNotEnough.
        buildings_raw["cache"][0]["data"]["cache"][0]["data"]["upgradeCosts"][
            "1"
        ] = 999999999
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            with self.assertRaises(WarehouseNotEnough):
                r = v["001"].upgrade(BuildingType.MAIN_BUILDING)
        # Switch the client tribe to Roman — presumably this changes which
        # queue slots Village.upgrade may use (TODO: confirm in tkpy).
        v["001"].client.tribe_id = Tribe.ROMAN
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            r = v["001"].upgrade(BuildingType.WOODCUTTER)
            self.assertEqual(r, {"mock": "mocked"})
        # Fill queue slot "2"; the upgrade still proceeds (no assertion on r).
        building_queue_raw["cache"][0]["data"]["freeSlots"]["2"] = 0
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            r = v["001"].upgrade(BuildingType.WOODCUTTER)
        # With slot "4" also exhausted the upgrade raises QueueFull.
        building_queue_raw["cache"][0]["data"]["freeSlots"]["4"] = 0
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            with self.assertRaises(QueueFull):
                r = v["001"].upgrade(BuildingType.WOODCUTTER)
        # A building flagged as max level cannot be upgraded further.
        buildings_raw["cache"][0]["data"]["cache"][0]["data"]["isMaxLvl"] = True
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            with self.assertRaises(BuildingAtMaxLevel):
                r = v["001"].upgrade(BuildingType.MAIN_BUILDING)
        # Upgrading a building type not present in the fixtures raises.
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            with self.assertRaises(Exception):
                r = v["001"].upgrade(BuildingType.SMITHY)
    def testing_village_construct(self):
        """Exercise Village.construct() against a mocked API endpoint.

        Walks the construct flow through its failure modes and success
        path: unknown building (KeyError), a successful construct,
        zero-cost construct, insufficient storage (WarehouseNotEnough),
        a rejected construct (FailedConstructBuilding), and every slot
        occupied (BuildingSlotFull).
        """
        g = None
        v = None
        villages_raw = {}
        village_raw = {}
        buildings_raw = {}
        building_queue_raw = {}
        construction_list_raw = {}
        # Load the pickled driver and the canned JSON API fixtures.
        with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
            g = pickle.load(f)
        with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
            villages_raw = json.load(f)
        with open("./tests/unit/fixtures/village_raw.json", "r") as f:
            village_raw = json.load(f)
        with open("./tests/unit/fixtures/buildings_raw.json", "r") as f:
            buildings_raw = json.load(f)
        with open("./tests/unit/fixtures/building_queue_raw.json", "r") as f:
            building_queue_raw = json.load(f)
        with open("./tests/unit/fixtures/construction_list_raw.json", "r") as f:
            construction_list_raw = json.load(f)
        # Populate the Villages container from the mocked villages payload.
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST", "https://com1.kingdoms.com/api/", json=villages_raw
            )
            v = Villages(g)
            v.pull()
        # STABLE is absent from the construction-list fixture -> KeyError.
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            with self.assertRaises(KeyError):
                r = v["001"].construct(BuildingType.STABLE)
        # Happy path: SMITHY is buildable and the final mocked response
        # is returned verbatim.
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": construction_list_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            r = v["001"].construct(BuildingType.SMITHY)
            self.assertEqual(r, {"mock": "mocked"})
        # Zero upgrade costs must still succeed.
        construction_list_raw["response"]["buildings"]["buildable"][0][
            "upgradeCosts"
        ] = {"1": 0, "2": 0, "3": 0, "4": 0}
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": construction_list_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            r = v["001"].construct(BuildingType.SMITHY)
            self.assertEqual(r, {"mock": "mocked"})
        # A cost above the fixture's stock triggers WarehouseNotEnough.
        construction_list_raw["response"]["buildings"]["buildable"][0][
            "upgradeCosts"
        ] = {"1": 0, "2": 0, "3": 0, "4": 9999999999}
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": construction_list_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            with self.assertRaises(WarehouseNotEnough):
                r = v["001"].construct(BuildingType.SMITHY)
        # STABLE cannot be constructed -> FailedConstructBuilding.
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": construction_list_raw},
                    {"json": {"mock": "mocked"}},
                ],
            )
            with self.assertRaises(FailedConstructBuilding):
                r = v["001"].construct(BuildingType.STABLE)
        # Mark every building slot as occupied -> BuildingSlotFull.
        for x in buildings_raw["cache"][0]["data"]["cache"]:
            x["data"]["buildingType"] = "1"
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST",
                "https://com1.kingdoms.com/api/",
                [
                    {"json": village_raw},
                    {"json": buildings_raw},
                    {"json": building_queue_raw},
                    {"json": construction_list_raw},
                ],
            )
            with self.assertRaises(BuildingSlotFull):
                r = v["001"].construct(BuildingType.SMITHY)
class TestWarehouse(unittest.TestCase):
    """Unit tests for the warehouse (resource) view of a village."""

    def testing_warehouse(self):
        """Verify storage/production/capacity parsing and item access.

        Loads a pickled driver plus the canned villages JSON fixture,
        builds the Villages container against a mocked API endpoint,
        then checks every warehouse accessor on village "001".
        """
        g = None
        v = None
        villages_raw = {}
        with open("./tests/unit/fixtures/pickled_driver.py", "rb") as f:
            g = pickle.load(f)
        with open("./tests/unit/fixtures/villages_raw.json", "r") as f:
            villages_raw = json.load(f)
        with requests_mock.mock() as mock:
            mock.register_uri(
                "POST", "https://com1.kingdoms.com/api/", json=villages_raw
            )
            v = Villages(g)
            v.pull()
        v1 = v["001"]
        # Resource keys "1".."4" map to wood, clay, iron, crop.
        self.assertEqual(
            v1.warehouse.storage,
            {
                "1": 929.07843923224,
                "2": 712.9796366545,
                "3": 691.80224180989,
                "4": 641.28190453036,
            },
        )
        self.assertEqual(
            v1.warehouse.production, {"1": 1290, "2": 990, "3": 960, "4": 890}
        )
        self.assertEqual(
            v1.warehouse.capacity, {"1": 22500, "2": 22500, "3": 22500, "4": 15000}
        )
        # Per-resource summaries use "<stock>/<capacity> <production>".
        self.assertEqual(v1.warehouse.wood, "929.07843923224/22500 1290")
        self.assertEqual(v1.warehouse.clay, "712.9796366545/22500 990")
        self.assertEqual(v1.warehouse.iron, "691.80224180989/22500 960")
        self.assertEqual(v1.warehouse.crop, "641.28190453036/15000 890")
        # Indexing accepts both the numeric key and the resource name.
        self.assertEqual(v1.warehouse["1"], 929.07843923224)
        self.assertEqual(v1.warehouse["wood"], 929.07843923224)
        with self.assertRaises(KeyError):
            v1.warehouse["key error"]
# Allow running this test module directly (python <file>.py).
if __name__ == "__main__":
    unittest.main()
| 33.474777
| 83
| 0.494726
| 2,393
| 22,562
| 4.493105
| 0.061847
| 0.061849
| 0.054408
| 0.07115
| 0.856213
| 0.838542
| 0.817243
| 0.795201
| 0.789249
| 0.771763
| 0
| 0.03242
| 0.364285
| 22,562
| 673
| 84
| 33.524517
| 0.717214
| 0
| 0
| 0.752228
| 0
| 0
| 0.176004
| 0.083681
| 0
| 0
| 0
| 0
| 0.085562
| 1
| 0.019608
| false
| 0
| 0.023173
| 0
| 0.048128
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ba336f482eedd1a8c155e51e268009119a196519
| 26
|
py
|
Python
|
cb1521_script.py
|
estellespanneut/example-open-source-repo-2021
|
413735c3aac710999229970736553c393b23a49f
|
[
"MIT"
] | null | null | null |
cb1521_script.py
|
estellespanneut/example-open-source-repo-2021
|
413735c3aac710999229970736553c393b23a49f
|
[
"MIT"
] | 1
|
2021-06-15T23:05:23.000Z
|
2021-06-15T23:05:23.000Z
|
cb1521_script.py
|
estellespanneut/example-open-source-repo-2021
|
413735c3aac710999229970736553c393b23a49f
|
[
"MIT"
] | 78
|
2021-03-15T21:54:31.000Z
|
2021-07-28T05:41:32.000Z
|
# Minimal smoke-test script: emit a fixed greeting.
greeting = "Hello hello hello"
print(greeting)
| 26
| 26
| 0.769231
| 4
| 26
| 5
| 0.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 26
| 1
| 26
| 26
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.62963
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ba946b852a859c5028364dc6a68f879bfbe7918d
| 302
|
py
|
Python
|
pymt_landlab/__init__.py
|
pymt-lab/pymt_landlab
|
9914c0e1a57cc1ffcde12aa220ed75b9813b7641
|
[
"MIT"
] | null | null | null |
pymt_landlab/__init__.py
|
pymt-lab/pymt_landlab
|
9914c0e1a57cc1ffcde12aa220ed75b9813b7641
|
[
"MIT"
] | null | null | null |
pymt_landlab/__init__.py
|
pymt-lab/pymt_landlab
|
9914c0e1a57cc1ffcde12aa220ed75b9813b7641
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
"""Re-export the BMI component classes of pymt_landlab."""
from .bmi import (
    OverlandFlow,
    Flexure,
    LinearDiffuser,
    ExponentialWeatherer,
    TransportLengthHillslopeDiffuser,
    Vegetation,
    SoilMoisture,
)

__all__ = [
    "OverlandFlow",
    "Flexure",
    "LinearDiffuser",
    "ExponentialWeatherer",
    "TransportLengthHillslopeDiffuser",
    "Vegetation",
    "SoilMoisture",
]
| 37.75
| 139
| 0.824503
| 22
| 302
| 11.136364
| 0.681818
| 0.155102
| 0.269388
| 0.432653
| 0.873469
| 0.873469
| 0.873469
| 0
| 0
| 0
| 0
| 0
| 0.046358
| 302
| 7
| 140
| 43.142857
| 0.850694
| 0.069536
| 0
| 0
| 0
| 0
| 0.382143
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
baad43ee2c5715a3f5362349607f34d0ef8aaf56
| 29,030
|
py
|
Python
|
src/solidity_types.py
|
mimirblockchainsolutions/w3aio
|
ec5880a53970bf3946c5feaa2c20119b48fb77d1
|
[
"MIT"
] | 3
|
2018-10-18T18:43:31.000Z
|
2020-10-01T23:45:20.000Z
|
src/solidity_types.py
|
mimirblockchainsolutions/w3aio
|
ec5880a53970bf3946c5feaa2c20119b48fb77d1
|
[
"MIT"
] | null | null | null |
src/solidity_types.py
|
mimirblockchainsolutions/w3aio
|
ec5880a53970bf3946c5feaa2c20119b48fb77d1
|
[
"MIT"
] | null | null | null |
from .hextools import ( check_length_eq,
checkHex,
pad,
to_hex,
trim0x, )
from .keccak import keccak
from .soliditykeccak import solidityKeccak
from .exceptions import (BadInitialType, BadComparisonType)
import binascii
import codecs
import logging
log = logging.getLogger(__name__)
#TODO int Types
#TODO fixed types
#TODO bytes
#TODO string
#TODO dynamic?
class Bytes32(object):
    """Fixed-width 32-byte value stored as a 64-char hex string (no 0x)."""
    __slots__ = ["_data","_length"]
    def __init__(self, _data):
        # 64 hex characters == 32 bytes.
        self._length = 64
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        else:
            raise BadInitialType
    def _init_with_string(self,_data):
        # Strip any 0x prefix, enforce exact width, pad, validate hex.
        _data=trim0x(_data)
        check_length_eq(_data,self._length)
        _data = pad(_data,self._length)
        if type(_data) is str:
            checkHex(_data)
            self._data = _data
        else:
            raise BadInitialType
    def _init_with_bytes(self,_data):
        # Hex-encode the raw bytes and reuse the string initializer.
        _data = codecs.decode(binascii.hexlify(_data),'utf-8')
        self._init_with_string(_data)
    def _isinstance(self,other):
        # Comparisons are only defined between values of the same type.
        if not self.__class__.__name__ == other.__class__.__name__:
            log.error('TYPE ERROR')
            raise BadComparisonType
        return other
    def as_bytes(self):
        return binascii.unhexlify(self._data)
    def as_str(self):
        return '0x'+self._data
    def as_int(self):
        return int(self._data,16)
    def hash(self):
        # NOTE(review): this keccaks the 0x-prefixed string, while every
        # sibling class (Bytes16/8/4, Address, Uint*) keccaks
        # trim0x(self.as_str()) — confirm the prefix is intended here.
        _data = keccak(self.as_str())
        return Bytes32(_data)
    def __len__(self):
        return len(self._data)
    def __str__(self):
        return self.as_str()
    def __eq__(self, other):
        other = self._isinstance(other)
        return self.as_int() == other.as_int()
    def __repr__(self):
        return self.as_str()
    def __hash__(self):
        # Hash binds the class name to the payload so equal payloads of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        data_hash = self.hash()
        ident_hash = solidityKeccak([cls_hash,data_hash])
        return int(trim0x(ident_hash),16)
    def replace(self,rep,val):
        return self.as_str().replace(rep,val)
    def encode(self):
        return self.as_bytes()
class Bytes16(object):
    """Fixed-width 16-byte value held as a 32-character hex string."""

    __slots__ = ["_data", "_length"]

    def __init__(self, _data):
        # 32 hex characters == 16 bytes.
        self._length = 32
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        else:
            raise BadInitialType

    def _init_with_string(self, _data):
        # Normalize: drop 0x, enforce the exact width, pad, check hex digits.
        hexstr = trim0x(_data)
        check_length_eq(hexstr, self._length)
        hexstr = pad(hexstr, self._length)
        if type(hexstr) is not str:
            raise BadInitialType
        checkHex(hexstr)
        self._data = hexstr

    def _init_with_bytes(self, _data):
        # Hex-encode the raw bytes, then defer to the string path.
        self._init_with_string(codecs.decode(binascii.hexlify(_data), "utf-8"))

    def _isinstance(self, other):
        # Values are only comparable to the same concrete type.
        if self.__class__.__name__ != other.__class__.__name__:
            log.error("TYPE ERROR")
            raise BadComparisonType
        return other

    def as_bytes(self):
        return binascii.unhexlify(self._data)

    def as_str(self):
        return "0x" + self._data

    def as_int(self):
        return int(self._data, 16)

    def hash(self):
        # keccak of the bare hex payload, widened to a Bytes32.
        return Bytes32(keccak(trim0x(self.as_str())))

    def __len__(self):
        return len(self._data)

    def __str__(self):
        return self.as_str()

    def __eq__(self, other):
        peer = self._isinstance(other)
        return self.as_int() == peer.as_int()

    def __repr__(self):
        return self.as_str()

    def __hash__(self):
        # Bind the class name to the payload so equal payloads of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        ident_hash = solidityKeccak([cls_hash, self.hash()])
        return int(trim0x(ident_hash), 16)

    def replace(self, rep, val):
        return self.as_str().replace(rep, val)

    def encode(self):
        return self.as_bytes()
class Bytes8(object):
    """Fixed-width 8-byte value held as a 16-character hex string."""

    __slots__ = ["_data", "_length"]

    def __init__(self, _data):
        # 16 hex characters == 8 bytes.
        self._length = 16
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        else:
            raise BadInitialType

    def _init_with_string(self, _data):
        # Normalize: drop 0x, enforce the exact width, pad, check hex digits.
        hexstr = trim0x(_data)
        check_length_eq(hexstr, self._length)
        hexstr = pad(hexstr, self._length)
        if type(hexstr) is not str:
            raise BadInitialType
        checkHex(hexstr)
        self._data = hexstr

    def _init_with_bytes(self, _data):
        # Hex-encode the raw bytes, then defer to the string path.
        self._init_with_string(codecs.decode(binascii.hexlify(_data), "utf-8"))

    def _isinstance(self, other):
        # Values are only comparable to the same concrete type.
        if self.__class__.__name__ != other.__class__.__name__:
            log.error("TYPE ERROR")
            raise BadComparisonType
        return other

    def as_bytes(self):
        return binascii.unhexlify(self._data)

    def as_str(self):
        return "0x" + self._data

    def as_int(self):
        return int(self._data, 16)

    def hash(self):
        # keccak of the bare hex payload, widened to a Bytes32.
        return Bytes32(keccak(trim0x(self.as_str())))

    def __len__(self):
        return len(self._data)

    def __str__(self):
        return self.as_str()

    def __eq__(self, other):
        peer = self._isinstance(other)
        return self.as_int() == peer.as_int()

    def __repr__(self):
        return self.as_str()

    def __hash__(self):
        # Bind the class name to the payload so equal payloads of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        ident_hash = solidityKeccak([cls_hash, self.hash()])
        return int(trim0x(ident_hash), 16)

    def replace(self, rep, val):
        return self.as_str().replace(rep, val)

    def encode(self):
        return self.as_bytes()
class Bytes4(object):
    """Fixed-width 4-byte value held as an 8-character hex string."""

    __slots__ = ["_data", "_length"]

    def __init__(self, _data):
        # 8 hex characters == 4 bytes.
        self._length = 8
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        else:
            raise BadInitialType

    def _init_with_string(self, _data):
        # Normalize: drop 0x, enforce the exact width, pad, check hex digits.
        hexstr = trim0x(_data)
        check_length_eq(hexstr, self._length)
        hexstr = pad(hexstr, self._length)
        if type(hexstr) is not str:
            raise BadInitialType
        checkHex(hexstr)
        self._data = hexstr

    def _init_with_bytes(self, _data):
        # Hex-encode the raw bytes, then defer to the string path.
        self._init_with_string(codecs.decode(binascii.hexlify(_data), "utf-8"))

    def _isinstance(self, other):
        # Values are only comparable to the same concrete type.
        if self.__class__.__name__ != other.__class__.__name__:
            log.error("TYPE ERROR")
            raise BadComparisonType
        return other

    def as_bytes(self):
        return binascii.unhexlify(self._data)

    def as_str(self):
        return "0x" + self._data

    def as_int(self):
        return int(self._data, 16)

    def hash(self):
        # keccak of the bare hex payload, widened to a Bytes32.
        return Bytes32(keccak(trim0x(self.as_str())))

    def __len__(self):
        return len(self._data)

    def __str__(self):
        return self.as_str()

    def __eq__(self, other):
        peer = self._isinstance(other)
        return self.as_int() == peer.as_int()

    def __repr__(self):
        return self.as_str()

    def __hash__(self):
        # Bind the class name to the payload so equal payloads of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        ident_hash = solidityKeccak([cls_hash, self.hash()])
        return int(trim0x(ident_hash), 16)

    def replace(self, rep, val):
        return self.as_str().replace(rep, val)

    def encode(self):
        return self.as_bytes()
class Address(object):
    """20-byte account address held as a 40-character hex string."""

    __slots__ = ["_data", "_length"]

    def __init__(self, _data):
        # 40 hex characters == 20 bytes.
        self._length = 40
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        else:
            raise BadInitialType

    def _init_with_string(self, _data):
        # Normalize: drop 0x, enforce the exact width, pad, check hex digits.
        hexstr = trim0x(_data)
        check_length_eq(hexstr, self._length)
        hexstr = pad(hexstr, self._length)
        if type(hexstr) is not str:
            raise BadInitialType
        checkHex(hexstr)
        self._data = hexstr

    def _init_with_bytes(self, _data):
        # Hex-encode the raw bytes, then defer to the string path.
        self._init_with_string(codecs.decode(binascii.hexlify(_data), "utf-8"))

    def _isinstance(self, other):
        # Values are only comparable to the same concrete type.
        if self.__class__.__name__ != other.__class__.__name__:
            log.error("TYPE ERROR")
            raise BadComparisonType
        return other

    def as_bytes(self):
        return binascii.unhexlify(self._data)

    def as_str(self):
        return "0x" + self._data

    def as_int(self):
        return int(self._data, 16)

    def hash(self):
        # keccak of the bare hex payload, widened to a Bytes32.
        return Bytes32(keccak(trim0x(self.as_str())))

    def __len__(self):
        return len(self._data)

    def __str__(self):
        return self.as_str()

    def __eq__(self, other):
        peer = self._isinstance(other)
        return self.as_int() == peer.as_int()

    def __repr__(self):
        return self.as_str()

    def __hash__(self):
        # Bind the class name to the payload so equal payloads of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        ident_hash = solidityKeccak([cls_hash, self.hash()])
        return int(trim0x(ident_hash), 16)

    def replace(self, rep, val):
        return self.as_str().replace(rep, val)

    def encode(self):
        # Unlike the Bytes* types, addresses encode as the 0x string.
        return self.as_str()
class Uint256(object):
    """Unsigned 256-bit integer stored as a 64-char hex string (no 0x).

    Accepts ``bytes``, a hex ``str`` (with or without a ``0x`` prefix),
    or an ``int``; any other type raises ``BadInitialType``.  The range
    check uses ``assert`` (NOTE: stripped under ``python -O``).
    """
    __slots__ = ["_data","_length"]
    def __init__(self, _data):
        # 64 hex digits * 4 bits each == 256 bits.
        self._length = 64
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        elif type(_data) == int:
            self._init_with_int(_data)
        else:
            raise BadInitialType
        assert self.as_int()< 2**(int(self._length*4)),"Number too large."
        assert self.as_int()>= 0,"Unsigned means positive. Try again."
    def _init_with_string(self,_data):
        # Strip any 0x prefix, left-pad to the fixed width, validate hex.
        _data=trim0x(_data)
        _data = pad(_data,self._length)
        if type(_data) is str:
            checkHex(_data)
            self._data = _data
        else:
            raise BadInitialType
    def _init_with_bytes(self,_data):
        # Hex-encode the raw bytes and reuse the string path.
        _data = codecs.decode(binascii.hexlify(_data),'utf-8')
        self._init_with_string(_data)
    def _init_with_int(self,_data):
        # Render the int as hex and reuse the string path.
        _data = hex(_data)
        _data = trim0x(_data)
        _data = pad(_data,self._length)
        self._init_with_string(_data)
    def _isinstance(self,other):
        # Arithmetic/comparison only between values of the same type.
        if not self.__class__.__name__ == other.__class__.__name__:
            log.error('TYPE ERROR')
            raise BadComparisonType
        return other
    def as_int(self):
        return int(self._data,16)
    def as_bytes(self):
        return binascii.unhexlify(self._data)
    def as_str(self):
        return '0x'+self._data
    def hash(self):
        # keccak of the bare hex payload (prefix stripped), as Bytes32.
        _data = keccak(trim0x(self.as_str()))
        return Bytes32(_data)
    def __len__(self):
        return len(self._data)
    def __str__(self):
        return self.as_str()
    def __eq__(self, other):
        other = self._isinstance(other)
        return self.as_int() == other.as_int()
    def __le__(self, other):
        other = self._isinstance(other)
        return self.as_int() <= other.as_int()
    def __ge__(self, other):
        other = self._isinstance(other)
        return self.as_int() >= other.as_int()
    def __gt__(self, other):
        other = self._isinstance(other)
        return self.as_int() > other.as_int()
    def __lt__(self, other):
        other = self._isinstance(other)
        return self.as_int() < other.as_int()
    def __add__(self, other):
        other = self._isinstance(other)
        out = other.as_int()+self.as_int()
        return self._build(out)
    def __sub__(self, other):
        other = self._isinstance(other)
        # BUGFIX: was ``other - self``; for ``a - b`` Python calls
        # ``a.__sub__(b)`` with self=a, so the result must be self - other.
        out = self.as_int()-other.as_int()
        return self._build(out)
    def __mul__(self, other):
        other = self._isinstance(other)
        out = other.as_int() * self.as_int()
        return self._build(out)
    def _build(self,_data):
        # Wrap a plain int back into this exact type (range re-checked).
        return type(self)(_data)
    def __repr__(self):
        return self.as_str()
    def __hash__(self):
        # Hash binds the class name to the payload so equal values of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        data_hash = self.hash()
        ident_hash = solidityKeccak([cls_hash,data_hash])
        return int(trim0x(ident_hash),16)
    def __index__(self):
        return self.as_int()
    def replace(self,rep,val):
        return self.as_str().replace(rep,val)
    def encode(self):
        return self.as_int()
class Uint128(object):
    """Unsigned 128-bit integer stored as a 32-char hex string (no 0x).

    Accepts ``bytes``, a hex ``str`` (with or without a ``0x`` prefix),
    or an ``int``; any other type raises ``BadInitialType``.  The range
    check uses ``assert`` (NOTE: stripped under ``python -O``).
    """
    __slots__ = ["_data","_length"]
    def __init__(self, _data):
        # BUGFIX: was 2 (== 8 bits, i.e. Uint8 behavior).  128 bits needs
        # 32 hex digits (4 bits per digit).
        self._length = 32
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        elif type(_data) == int:
            self._init_with_int(_data)
        else:
            raise BadInitialType
        assert self.as_int()< 2**(int(self._length*4)),"Number too large."
        assert self.as_int()>= 0,"Unsigned means positive. Try again."
    def _init_with_string(self,_data):
        # Strip any 0x prefix, left-pad to the fixed width, validate hex.
        _data=trim0x(_data)
        _data = pad(_data,self._length)
        if type(_data) is str:
            checkHex(_data)
            self._data = _data
        else:
            raise BadInitialType
    def _init_with_bytes(self,_data):
        # Hex-encode the raw bytes and reuse the string path.
        _data = codecs.decode(binascii.hexlify(_data),'utf-8')
        self._init_with_string(_data)
    def _init_with_int(self,_data):
        # Render the int as hex and reuse the string path.
        _data = hex(_data)
        _data = trim0x(_data)
        _data = pad(_data,self._length)
        self._init_with_string(_data)
    def _isinstance(self,other):
        # Arithmetic/comparison only between values of the same type.
        if not self.__class__.__name__ == other.__class__.__name__:
            log.error('TYPE ERROR')
            raise BadComparisonType
        return other
    def as_int(self):
        return int(self._data,16)
    def as_bytes(self):
        return binascii.unhexlify(self._data)
    def as_str(self):
        return '0x'+self._data
    def hash(self):
        # keccak of the bare hex payload (prefix stripped), as Bytes32.
        _data = keccak(trim0x(self.as_str()))
        return Bytes32(_data)
    def __len__(self):
        return len(self._data)
    def __str__(self):
        return self.as_str()
    def __eq__(self, other):
        other = self._isinstance(other)
        return self.as_int() == other.as_int()
    def __le__(self, other):
        other = self._isinstance(other)
        return self.as_int() <= other.as_int()
    def __ge__(self, other):
        other = self._isinstance(other)
        return self.as_int() >= other.as_int()
    def __gt__(self, other):
        other = self._isinstance(other)
        return self.as_int() > other.as_int()
    def __lt__(self, other):
        other = self._isinstance(other)
        return self.as_int() < other.as_int()
    def __add__(self, other):
        other = self._isinstance(other)
        out = other.as_int()+self.as_int()
        return self._build(out)
    def __sub__(self, other):
        other = self._isinstance(other)
        # BUGFIX: was ``other - self``; for ``a - b`` Python calls
        # ``a.__sub__(b)`` with self=a, so the result must be self - other.
        out = self.as_int()-other.as_int()
        return self._build(out)
    def __mul__(self, other):
        other = self._isinstance(other)
        out = other.as_int() * self.as_int()
        return self._build(out)
    def _build(self,_data):
        # Wrap a plain int back into this exact type (range re-checked).
        return type(self)(_data)
    def __repr__(self):
        return self.as_str()
    def __hash__(self):
        # Hash binds the class name to the payload so equal values of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        data_hash = self.hash()
        ident_hash = solidityKeccak([cls_hash,data_hash])
        return int(trim0x(ident_hash),16)
    def __index__(self):
        return self.as_int()
    def replace(self,rep,val):
        return self.as_str().replace(rep,val)
    def encode(self):
        return self.as_int()
class Uint64(object):
    """Unsigned 64-bit integer stored as a 16-char hex string (no 0x).

    Accepts ``bytes``, a hex ``str`` (with or without a ``0x`` prefix),
    or an ``int``; any other type raises ``BadInitialType``.  The range
    check uses ``assert`` (NOTE: stripped under ``python -O``).
    """
    __slots__ = ["_data","_length"]
    def __init__(self, _data):
        # 16 hex digits * 4 bits each == 64 bits.
        self._length = 16
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        elif type(_data) == int:
            self._init_with_int(_data)
        else:
            raise BadInitialType
        assert self.as_int()< 2**(int(self._length*4)),"Number too large."
        assert self.as_int()>= 0,"Unsigned means positive. Try again."
    def _init_with_string(self,_data):
        # Strip any 0x prefix, left-pad to the fixed width, validate hex.
        _data=trim0x(_data)
        _data = pad(_data,self._length)
        if type(_data) is str:
            checkHex(_data)
            self._data = _data
        else:
            raise BadInitialType
    def _init_with_bytes(self,_data):
        # Hex-encode the raw bytes and reuse the string path.
        _data = codecs.decode(binascii.hexlify(_data),'utf-8')
        self._init_with_string(_data)
    def _init_with_int(self,_data):
        # Render the int as hex and reuse the string path.
        _data = hex(_data)
        _data = trim0x(_data)
        _data = pad(_data,self._length)
        self._init_with_string(_data)
    def _isinstance(self,other):
        # Arithmetic/comparison only between values of the same type.
        if not self.__class__.__name__ == other.__class__.__name__:
            log.error('TYPE ERROR')
            raise BadComparisonType
        return other
    def as_int(self):
        return int(self._data,16)
    def as_bytes(self):
        return binascii.unhexlify(self._data)
    def as_str(self):
        return '0x'+self._data
    def hash(self):
        # keccak of the bare hex payload (prefix stripped), as Bytes32.
        _data = keccak(trim0x(self.as_str()))
        return Bytes32(_data)
    def __len__(self):
        return len(self._data)
    def __str__(self):
        return self.as_str()
    def __eq__(self, other):
        other = self._isinstance(other)
        return self.as_int() == other.as_int()
    def __le__(self, other):
        other = self._isinstance(other)
        return self.as_int() <= other.as_int()
    def __ge__(self, other):
        other = self._isinstance(other)
        return self.as_int() >= other.as_int()
    def __gt__(self, other):
        other = self._isinstance(other)
        return self.as_int() > other.as_int()
    def __lt__(self, other):
        other = self._isinstance(other)
        return self.as_int() < other.as_int()
    def __add__(self, other):
        other = self._isinstance(other)
        out = other.as_int()+self.as_int()
        return self._build(out)
    def __sub__(self, other):
        other = self._isinstance(other)
        # BUGFIX: was ``other - self``; for ``a - b`` Python calls
        # ``a.__sub__(b)`` with self=a, so the result must be self - other.
        out = self.as_int()-other.as_int()
        return self._build(out)
    def __mul__(self, other):
        other = self._isinstance(other)
        out = other.as_int() * self.as_int()
        return self._build(out)
    def _build(self,_data):
        # Wrap a plain int back into this exact type (range re-checked).
        return type(self)(_data)
    def __repr__(self):
        return self.as_str()
    def __hash__(self):
        # Hash binds the class name to the payload so equal values of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        data_hash = self.hash()
        ident_hash = solidityKeccak([cls_hash,data_hash])
        return int(trim0x(ident_hash),16)
    def __index__(self):
        return self.as_int()
    def replace(self,rep,val):
        return self.as_str().replace(rep,val)
    def encode(self):
        return self.as_int()
class Uint32(object):
    """Unsigned 32-bit integer stored as an 8-char hex string (no 0x).

    Accepts ``bytes``, a hex ``str`` (with or without a ``0x`` prefix),
    or an ``int``; any other type raises ``BadInitialType``.  The range
    check uses ``assert`` (NOTE: stripped under ``python -O``).
    """
    __slots__ = ["_data","_length"]
    def __init__(self, _data):
        # 8 hex digits * 4 bits each == 32 bits.
        self._length = 8
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        elif type(_data) == int:
            self._init_with_int(_data)
        else:
            raise BadInitialType
        assert self.as_int()< 2**(int(self._length*4)),"Number too large."
        assert self.as_int()>= 0,"Unsigned means positive. Try again."
    def _init_with_string(self,_data):
        # Strip any 0x prefix, left-pad to the fixed width, validate hex.
        _data=trim0x(_data)
        _data = pad(_data,self._length)
        if type(_data) is str:
            checkHex(_data)
            self._data = _data
        else:
            raise BadInitialType
    def _init_with_bytes(self,_data):
        # Hex-encode the raw bytes and reuse the string path.
        _data = codecs.decode(binascii.hexlify(_data),'utf-8')
        self._init_with_string(_data)
    def _init_with_int(self,_data):
        # Render the int as hex and reuse the string path.
        _data = hex(_data)
        _data = trim0x(_data)
        _data = pad(_data,self._length)
        self._init_with_string(_data)
    def _isinstance(self,other):
        # Arithmetic/comparison only between values of the same type.
        if not self.__class__.__name__ == other.__class__.__name__:
            log.error('TYPE ERROR')
            raise BadComparisonType
        return other
    def as_int(self):
        return int(self._data,16)
    def as_bytes(self):
        return binascii.unhexlify(self._data)
    def as_str(self):
        return '0x'+self._data
    def hash(self):
        # keccak of the bare hex payload (prefix stripped), as Bytes32.
        _data = keccak(trim0x(self.as_str()))
        return Bytes32(_data)
    def __len__(self):
        return len(self._data)
    def __str__(self):
        return self.as_str()
    def __eq__(self, other):
        other = self._isinstance(other)
        return self.as_int() == other.as_int()
    def __le__(self, other):
        other = self._isinstance(other)
        return self.as_int() <= other.as_int()
    def __ge__(self, other):
        other = self._isinstance(other)
        return self.as_int() >= other.as_int()
    def __gt__(self, other):
        other = self._isinstance(other)
        return self.as_int() > other.as_int()
    def __lt__(self, other):
        other = self._isinstance(other)
        return self.as_int() < other.as_int()
    def __add__(self, other):
        other = self._isinstance(other)
        out = other.as_int()+self.as_int()
        return self._build(out)
    def __sub__(self, other):
        other = self._isinstance(other)
        # BUGFIX: was ``other - self``; for ``a - b`` Python calls
        # ``a.__sub__(b)`` with self=a, so the result must be self - other.
        out = self.as_int()-other.as_int()
        return self._build(out)
    def __mul__(self, other):
        other = self._isinstance(other)
        out = other.as_int() * self.as_int()
        return self._build(out)
    def _build(self,_data):
        # Wrap a plain int back into this exact type (range re-checked).
        return type(self)(_data)
    def __repr__(self):
        return self.as_str()
    def __hash__(self):
        # Hash binds the class name to the payload so equal values of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        data_hash = self.hash()
        ident_hash = solidityKeccak([cls_hash,data_hash])
        return int(trim0x(ident_hash),16)
    def __index__(self):
        return self.as_int()
    def replace(self,rep,val):
        return self.as_str().replace(rep,val)
    def encode(self):
        return self.as_int()
class Uint16(object):
    """Unsigned 16-bit integer stored as a 4-char hex string (no 0x).

    Accepts ``bytes``, a hex ``str`` (with or without a ``0x`` prefix),
    or an ``int``; any other type raises ``BadInitialType``.  The range
    check uses ``assert`` (NOTE: stripped under ``python -O``).
    """
    __slots__ = ["_data","_length"]
    def __init__(self, _data):
        # 4 hex digits * 4 bits each == 16 bits.
        self._length = 4
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        elif type(_data) == int:
            self._init_with_int(_data)
        else:
            raise BadInitialType
        assert self.as_int()< 2**(int(self._length*4)),"Number too large."
        assert self.as_int()>= 0,"Unsigned means positive. Try again."
    def _init_with_string(self,_data):
        # Strip any 0x prefix, left-pad to the fixed width, validate hex.
        _data=trim0x(_data)
        _data = pad(_data,self._length)
        if type(_data) is str:
            checkHex(_data)
            self._data = _data
        else:
            raise BadInitialType
    def _init_with_bytes(self,_data):
        # Hex-encode the raw bytes and reuse the string path.
        _data = codecs.decode(binascii.hexlify(_data),'utf-8')
        self._init_with_string(_data)
    def _init_with_int(self,_data):
        # Render the int as hex and reuse the string path.
        _data = hex(_data)
        _data = trim0x(_data)
        _data = pad(_data,self._length)
        self._init_with_string(_data)
    def _isinstance(self,other):
        # Arithmetic/comparison only between values of the same type.
        if not self.__class__.__name__ == other.__class__.__name__:
            log.error('TYPE ERROR')
            raise BadComparisonType
        return other
    def as_int(self):
        return int(self._data,16)
    def as_bytes(self):
        return binascii.unhexlify(self._data)
    def as_str(self):
        return '0x'+self._data
    def hash(self):
        # keccak of the bare hex payload (prefix stripped), as Bytes32.
        _data = keccak(trim0x(self.as_str()))
        return Bytes32(_data)
    def __len__(self):
        return len(self._data)
    def __str__(self):
        return self.as_str()
    def __eq__(self, other):
        other = self._isinstance(other)
        return self.as_int() == other.as_int()
    def __le__(self, other):
        other = self._isinstance(other)
        return self.as_int() <= other.as_int()
    def __ge__(self, other):
        other = self._isinstance(other)
        return self.as_int() >= other.as_int()
    def __gt__(self, other):
        other = self._isinstance(other)
        return self.as_int() > other.as_int()
    def __lt__(self, other):
        other = self._isinstance(other)
        return self.as_int() < other.as_int()
    def __add__(self, other):
        other = self._isinstance(other)
        out = other.as_int()+self.as_int()
        return self._build(out)
    def __sub__(self, other):
        other = self._isinstance(other)
        # BUGFIX: was ``other - self``; for ``a - b`` Python calls
        # ``a.__sub__(b)`` with self=a, so the result must be self - other.
        out = self.as_int()-other.as_int()
        return self._build(out)
    def __mul__(self, other):
        other = self._isinstance(other)
        out = other.as_int() * self.as_int()
        return self._build(out)
    def _build(self,_data):
        # Wrap a plain int back into this exact type (range re-checked).
        return type(self)(_data)
    def __repr__(self):
        return self.as_str()
    def __hash__(self):
        # Hash binds the class name to the payload so equal values of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        data_hash = self.hash()
        ident_hash = solidityKeccak([cls_hash,data_hash])
        return int(trim0x(ident_hash),16)
    def __index__(self):
        return self.as_int()
    def replace(self,rep,val):
        return self.as_str().replace(rep,val)
    def encode(self):
        return self.as_int()
class Uint8(object):
    """Unsigned 8-bit integer stored as a 2-char hex string (no 0x).

    Accepts ``bytes``, a hex ``str`` (with or without a ``0x`` prefix),
    or an ``int``; any other type raises ``BadInitialType``.  The range
    check uses ``assert`` (NOTE: stripped under ``python -O``).
    """
    __slots__ = ["_data","_length"]
    def __init__(self, _data):
        # 2 hex digits * 4 bits each == 8 bits.
        self._length = 2
        if type(_data) == bytes:
            self._init_with_bytes(_data)
        elif type(_data) == str:
            self._init_with_string(_data)
        elif type(_data) == int:
            self._init_with_int(_data)
        else:
            raise BadInitialType
        assert self.as_int()< 2**(int(self._length*4)),"Number too large."
        assert self.as_int()>= 0,"Unsigned means positive. Try again."
    def _init_with_string(self,_data):
        # Strip any 0x prefix, left-pad to the fixed width, validate hex.
        _data=trim0x(_data)
        _data = pad(_data,self._length)
        if type(_data) is str:
            checkHex(_data)
            self._data = _data
        else:
            raise BadInitialType
    def _init_with_bytes(self,_data):
        # Hex-encode the raw bytes and reuse the string path.
        _data = codecs.decode(binascii.hexlify(_data),'utf-8')
        self._init_with_string(_data)
    def _init_with_int(self,_data):
        # Render the int as hex and reuse the string path.
        _data = hex(_data)
        _data = trim0x(_data)
        _data = pad(_data,self._length)
        self._init_with_string(_data)
    def _isinstance(self,other):
        # Arithmetic/comparison only between values of the same type.
        if not self.__class__.__name__ == other.__class__.__name__:
            log.error('TYPE ERROR')
            raise BadComparisonType
        return other
    def as_int(self):
        return int(self._data,16)
    def as_bytes(self):
        return binascii.unhexlify(self._data)
    def as_str(self):
        return '0x'+self._data
    def hash(self):
        # keccak of the bare hex payload (prefix stripped), as Bytes32.
        _data = keccak(trim0x(self.as_str()))
        return Bytes32(_data)
    def __len__(self):
        return len(self._data)
    def __str__(self):
        return self.as_str()
    def __eq__(self, other):
        other = self._isinstance(other)
        return self.as_int() == other.as_int()
    def __le__(self, other):
        other = self._isinstance(other)
        return self.as_int() <= other.as_int()
    def __ge__(self, other):
        other = self._isinstance(other)
        return self.as_int() >= other.as_int()
    def __gt__(self, other):
        other = self._isinstance(other)
        return self.as_int() > other.as_int()
    def __lt__(self, other):
        other = self._isinstance(other)
        return self.as_int() < other.as_int()
    def __add__(self, other):
        other = self._isinstance(other)
        out = other.as_int()+self.as_int()
        return self._build(out)
    def __sub__(self, other):
        other = self._isinstance(other)
        # BUGFIX: was ``other - self``; for ``a - b`` Python calls
        # ``a.__sub__(b)`` with self=a, so the result must be self - other.
        out = self.as_int()-other.as_int()
        return self._build(out)
    def __mul__(self, other):
        other = self._isinstance(other)
        out = other.as_int() * self.as_int()
        return self._build(out)
    def _build(self,_data):
        # Wrap a plain int back into this exact type (range re-checked).
        return type(self)(_data)
    def __repr__(self):
        return self.as_str()
    def __hash__(self):
        # Hash binds the class name to the payload so equal values of
        # different widths hash differently.
        cls_hash = Bytes32(keccak(to_hex(self.__class__.__name__)))
        data_hash = self.hash()
        ident_hash = solidityKeccak([cls_hash,data_hash])
        return int(trim0x(ident_hash),16)
    def __index__(self):
        return self.as_int()
    def replace(self,rep,val):
        return self.as_str().replace(rep,val)
    def encode(self):
        return self.as_int()
| 26.929499
| 74
| 0.600551
| 3,627
| 29,030
| 4.316239
| 0.028122
| 0.045034
| 0.065155
| 0.060939
| 0.976557
| 0.976365
| 0.976365
| 0.976365
| 0.976365
| 0.976365
| 0
| 0.009761
| 0.283603
| 29,030
| 1,077
| 75
| 26.954503
| 0.742992
| 0.002205
| 0
| 0.967153
| 0
| 0
| 0.021788
| 0
| 0
| 0
| 0
| 0.000929
| 0.014599
| 1
| 0.273723
| false
| 0
| 0.008516
| 0.121655
| 0.53528
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
baaf7151cd9e7215ef2344c5ccc18a5319d48a18
| 21,509
|
py
|
Python
|
sdk/python/pulumi_alicloud/apigateway/_inputs.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/apigateway/_inputs.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/apigateway/_inputs.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'ApiConstantParameterArgs',
'ApiFcServiceConfigArgs',
'ApiHttpServiceConfigArgs',
'ApiHttpVpcServiceConfigArgs',
'ApiMockServiceConfigArgs',
'ApiRequestConfigArgs',
'ApiRequestParameterArgs',
'ApiSystemParameterArgs',
]
@pulumi.input_type
class ApiConstantParameterArgs:
    def __init__(__self__, *,
                 in_: pulumi.Input[str],
                 name: pulumi.Input[str],
                 value: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] in_: System parameter location; values: 'HEAD' and 'QUERY'.
        :param pulumi.Input[str] name: System parameter name which supports values including in [system parameter list](https://www.alibabacloud.com/help/doc-detail/43677.html)
        :param pulumi.Input[str] value: Constant parameter value.
        :param pulumi.Input[str] description: The description of Constant parameter.
        """
        # Collect the populated fields, then register them all at once.
        values = {"in_": in_, "name": name, "value": value}
        if description is not None:
            values["description"] = description
        for attr, attr_value in values.items():
            pulumi.set(__self__, attr, attr_value)

    @property
    @pulumi.getter(name="in")
    def in_(self) -> pulumi.Input[str]:
        """System parameter location; values: 'HEAD' and 'QUERY'."""
        return pulumi.get(self, "in_")

    @in_.setter
    def in_(self, value: pulumi.Input[str]):
        pulumi.set(self, "in_", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """System parameter name which supports values including in [system parameter list](https://www.alibabacloud.com/help/doc-detail/43677.html)"""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        """Constant parameter value."""
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input[str]):
        pulumi.set(self, "value", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """The description of Constant parameter."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
@pulumi.input_type
class ApiFcServiceConfigArgs:
    def __init__(__self__, *,
                 function_name: pulumi.Input[str],
                 region: pulumi.Input[str],
                 service_name: pulumi.Input[str],
                 timeout: pulumi.Input[int],
                 arn_role: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] function_name: The function name of function compute service.
        :param pulumi.Input[str] region: The region that the function compute service belongs to.
        :param pulumi.Input[str] service_name: The service name of function compute service.
        :param pulumi.Input[int] timeout: Backend service time-out time; unit: millisecond.
        :param pulumi.Input[str] arn_role: RAM role arn attached to the Function Compute service. This governs both who / what can invoke your Function, as well as what resources our Function has access to. See [User Permissions](https://www.alibabacloud.com/help/doc-detail/52885.htm) for more details.
        """
        # Collect the populated fields, then register them all at once.
        values = {
            "function_name": function_name,
            "region": region,
            "service_name": service_name,
            "timeout": timeout,
        }
        if arn_role is not None:
            values["arn_role"] = arn_role
        for attr, attr_value in values.items():
            pulumi.set(__self__, attr, attr_value)

    @property
    @pulumi.getter(name="functionName")
    def function_name(self) -> pulumi.Input[str]:
        """The function name of function compute service."""
        return pulumi.get(self, "function_name")

    @function_name.setter
    def function_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "function_name", value)

    @property
    @pulumi.getter
    def region(self) -> pulumi.Input[str]:
        """The region that the function compute service belongs to."""
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: pulumi.Input[str]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter(name="serviceName")
    def service_name(self) -> pulumi.Input[str]:
        """The service name of function compute service."""
        return pulumi.get(self, "service_name")

    @service_name.setter
    def service_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "service_name", value)

    @property
    @pulumi.getter
    def timeout(self) -> pulumi.Input[int]:
        """Backend service time-out time; unit: millisecond."""
        return pulumi.get(self, "timeout")

    @timeout.setter
    def timeout(self, value: pulumi.Input[int]):
        pulumi.set(self, "timeout", value)

    @property
    @pulumi.getter(name="arnRole")
    def arn_role(self) -> Optional[pulumi.Input[str]]:
        """RAM role arn attached to the Function Compute service. This governs both who / what can invoke your Function, as well as what resources our Function has access to. See [User Permissions](https://www.alibabacloud.com/help/doc-detail/52885.htm) for more details."""
        return pulumi.get(self, "arn_role")

    @arn_role.setter
    def arn_role(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn_role", value)
@pulumi.input_type
class ApiHttpServiceConfigArgs:
    def __init__(__self__, *,
                 address: pulumi.Input[str],
                 method: pulumi.Input[str],
                 path: pulumi.Input[str],
                 timeout: pulumi.Input[int],
                 aone_name: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] address: The address of backend service.
        :param pulumi.Input[str] method: The http method of backend service.
        :param pulumi.Input[str] path: The path of backend service.
        :param pulumi.Input[int] timeout: Backend service time-out time; unit: millisecond.
        """
        # Collect the populated fields, then register them all at once.
        values = {
            "address": address,
            "method": method,
            "path": path,
            "timeout": timeout,
        }
        if aone_name is not None:
            values["aone_name"] = aone_name
        for attr, attr_value in values.items():
            pulumi.set(__self__, attr, attr_value)

    @property
    @pulumi.getter
    def address(self) -> pulumi.Input[str]:
        """The address of backend service."""
        return pulumi.get(self, "address")

    @address.setter
    def address(self, value: pulumi.Input[str]):
        pulumi.set(self, "address", value)

    @property
    @pulumi.getter
    def method(self) -> pulumi.Input[str]:
        """The http method of backend service."""
        return pulumi.get(self, "method")

    @method.setter
    def method(self, value: pulumi.Input[str]):
        pulumi.set(self, "method", value)

    @property
    @pulumi.getter
    def path(self) -> pulumi.Input[str]:
        """The path of backend service."""
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: pulumi.Input[str]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def timeout(self) -> pulumi.Input[int]:
        """Backend service time-out time; unit: millisecond."""
        return pulumi.get(self, "timeout")

    @timeout.setter
    def timeout(self, value: pulumi.Input[int]):
        pulumi.set(self, "timeout", value)

    @property
    @pulumi.getter(name="aoneName")
    def aone_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "aone_name")

    @aone_name.setter
    def aone_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "aone_name", value)
@pulumi.input_type
class ApiHttpVpcServiceConfigArgs:
    def __init__(__self__, *,
                 method: pulumi.Input[str],
                 name: pulumi.Input[str],
                 path: pulumi.Input[str],
                 timeout: pulumi.Input[int],
                 aone_name: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] method: The http method of backend service.
        :param pulumi.Input[str] name: System parameter name which supports values including in [system parameter list](https://www.alibabacloud.com/help/doc-detail/43677.html)
        :param pulumi.Input[str] path: The path of backend service.
        :param pulumi.Input[int] timeout: Backend service time-out time; unit: millisecond.
        """
        # Collect the populated fields, then register them all at once.
        values = {
            "method": method,
            "name": name,
            "path": path,
            "timeout": timeout,
        }
        if aone_name is not None:
            values["aone_name"] = aone_name
        for attr, attr_value in values.items():
            pulumi.set(__self__, attr, attr_value)

    @property
    @pulumi.getter
    def method(self) -> pulumi.Input[str]:
        """The http method of backend service."""
        return pulumi.get(self, "method")

    @method.setter
    def method(self, value: pulumi.Input[str]):
        pulumi.set(self, "method", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """System parameter name which supports values including in [system parameter list](https://www.alibabacloud.com/help/doc-detail/43677.html)"""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def path(self) -> pulumi.Input[str]:
        """The path of backend service."""
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: pulumi.Input[str]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def timeout(self) -> pulumi.Input[int]:
        """Backend service time-out time; unit: millisecond."""
        return pulumi.get(self, "timeout")

    @timeout.setter
    def timeout(self, value: pulumi.Input[int]):
        pulumi.set(self, "timeout", value)

    @property
    @pulumi.getter(name="aoneName")
    def aone_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "aone_name")

    @aone_name.setter
    def aone_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "aone_name", value)
@pulumi.input_type
class ApiMockServiceConfigArgs:
    def __init__(__self__, *,
                 result: pulumi.Input[str],
                 aone_name: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] result: The result of the mock service.
        """
        # Register the required field first, then the optional one if given.
        pulumi.set(__self__, "result", result)
        if aone_name is None:
            return
        pulumi.set(__self__, "aone_name", aone_name)

    @property
    @pulumi.getter
    def result(self) -> pulumi.Input[str]:
        """The result of the mock service."""
        return pulumi.get(self, "result")

    @result.setter
    def result(self, value: pulumi.Input[str]):
        pulumi.set(self, "result", value)

    @property
    @pulumi.getter(name="aoneName")
    def aone_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "aone_name")

    @aone_name.setter
    def aone_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "aone_name", value)
@pulumi.input_type
class ApiRequestConfigArgs:
    def __init__(__self__, *,
                 method: pulumi.Input[str],
                 mode: pulumi.Input[str],
                 path: pulumi.Input[str],
                 protocol: pulumi.Input[str],
                 body_format: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] method: The http method of backend service.
        :param pulumi.Input[str] mode: The mode of the parameters between request parameters and service parameters, which support the values of 'MAPPING' and 'PASSTHROUGH'
        :param pulumi.Input[str] path: The path of backend service.
        :param pulumi.Input[str] protocol: The protocol of api which supports values of 'HTTP','HTTPS' or 'HTTP,HTTPS'
        :param pulumi.Input[str] body_format: The body format of the api, which support the values of 'STREAM' and 'FORM'
        """
        # Collect the populated fields, then register them all at once.
        values = {
            "method": method,
            "mode": mode,
            "path": path,
            "protocol": protocol,
        }
        if body_format is not None:
            values["body_format"] = body_format
        for attr, attr_value in values.items():
            pulumi.set(__self__, attr, attr_value)

    @property
    @pulumi.getter
    def method(self) -> pulumi.Input[str]:
        """The http method of backend service."""
        return pulumi.get(self, "method")

    @method.setter
    def method(self, value: pulumi.Input[str]):
        pulumi.set(self, "method", value)

    @property
    @pulumi.getter
    def mode(self) -> pulumi.Input[str]:
        """The mode of the parameters between request parameters and service parameters, which support the values of 'MAPPING' and 'PASSTHROUGH'"""
        return pulumi.get(self, "mode")

    @mode.setter
    def mode(self, value: pulumi.Input[str]):
        pulumi.set(self, "mode", value)

    @property
    @pulumi.getter
    def path(self) -> pulumi.Input[str]:
        """The path of backend service."""
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: pulumi.Input[str]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Input[str]:
        """The protocol of api which supports values of 'HTTP','HTTPS' or 'HTTP,HTTPS'"""
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: pulumi.Input[str]):
        pulumi.set(self, "protocol", value)

    @property
    @pulumi.getter(name="bodyFormat")
    def body_format(self) -> Optional[pulumi.Input[str]]:
        """The body format of the api, which support the values of 'STREAM' and 'FORM'"""
        return pulumi.get(self, "body_format")

    @body_format.setter
    def body_format(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "body_format", value)
@pulumi.input_type
class ApiRequestParameterArgs:
    def __init__(__self__, *,
                 in_: pulumi.Input[str],
                 in_service: pulumi.Input[str],
                 name: pulumi.Input[str],
                 name_service: pulumi.Input[str],
                 required: pulumi.Input[str],
                 type: pulumi.Input[str],
                 default_value: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] in_: System parameter location; values: 'HEAD' and 'QUERY'.
        :param pulumi.Input[str] in_service: Backend service's parameter location; values: BODY, HEAD, QUERY, and PATH.
        :param pulumi.Input[str] name: System parameter name which supports values including in [system parameter list](https://www.alibabacloud.com/help/doc-detail/43677.html)
        :param pulumi.Input[str] name_service: Backend service's parameter name.
        :param pulumi.Input[str] required: Parameter required or not; values: REQUIRED and OPTIONAL.
        :param pulumi.Input[str] type: Parameter type which supports values of 'STRING','INT','BOOLEAN','LONG',"FLOAT" and "DOUBLE"
        :param pulumi.Input[str] default_value: The default value of the parameter.
        :param pulumi.Input[str] description: The description of Constant parameter.
        """
        # Collect the populated fields, then register them all at once.
        values = {
            "in_": in_,
            "in_service": in_service,
            "name": name,
            "name_service": name_service,
            "required": required,
            "type": type,
        }
        if default_value is not None:
            values["default_value"] = default_value
        if description is not None:
            values["description"] = description
        for attr, attr_value in values.items():
            pulumi.set(__self__, attr, attr_value)

    @property
    @pulumi.getter(name="in")
    def in_(self) -> pulumi.Input[str]:
        """System parameter location; values: 'HEAD' and 'QUERY'."""
        return pulumi.get(self, "in_")

    @in_.setter
    def in_(self, value: pulumi.Input[str]):
        pulumi.set(self, "in_", value)

    @property
    @pulumi.getter(name="inService")
    def in_service(self) -> pulumi.Input[str]:
        """Backend service's parameter location; values: BODY, HEAD, QUERY, and PATH."""
        return pulumi.get(self, "in_service")

    @in_service.setter
    def in_service(self, value: pulumi.Input[str]):
        pulumi.set(self, "in_service", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """System parameter name which supports values including in [system parameter list](https://www.alibabacloud.com/help/doc-detail/43677.html)"""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="nameService")
    def name_service(self) -> pulumi.Input[str]:
        """Backend service's parameter name."""
        return pulumi.get(self, "name_service")

    @name_service.setter
    def name_service(self, value: pulumi.Input[str]):
        pulumi.set(self, "name_service", value)

    @property
    @pulumi.getter
    def required(self) -> pulumi.Input[str]:
        """Parameter required or not; values: REQUIRED and OPTIONAL."""
        return pulumi.get(self, "required")

    @required.setter
    def required(self, value: pulumi.Input[str]):
        pulumi.set(self, "required", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Parameter type which supports values of 'STRING','INT','BOOLEAN','LONG',"FLOAT" and "DOUBLE" """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="defaultValue")
    def default_value(self) -> Optional[pulumi.Input[str]]:
        """The default value of the parameter."""
        return pulumi.get(self, "default_value")

    @default_value.setter
    def default_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "default_value", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """The description of Constant parameter."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
@pulumi.input_type
class ApiSystemParameterArgs:
    def __init__(__self__, *,
                 in_: pulumi.Input[str],
                 name: pulumi.Input[str],
                 name_service: pulumi.Input[str]):
        """
        :param pulumi.Input[str] in_: System parameter location; values: 'HEAD' and 'QUERY'.
        :param pulumi.Input[str] name: System parameter name which supports values including in [system parameter list](https://www.alibabacloud.com/help/doc-detail/43677.html)
        :param pulumi.Input[str] name_service: Backend service's parameter name.
        """
        # All three fields are required; register them in declaration order.
        for attr, attr_value in (("in_", in_), ("name", name), ("name_service", name_service)):
            pulumi.set(__self__, attr, attr_value)

    @property
    @pulumi.getter(name="in")
    def in_(self) -> pulumi.Input[str]:
        """System parameter location; values: 'HEAD' and 'QUERY'."""
        return pulumi.get(self, "in_")

    @in_.setter
    def in_(self, value: pulumi.Input[str]):
        pulumi.set(self, "in_", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """System parameter name which supports values including in [system parameter list](https://www.alibabacloud.com/help/doc-detail/43677.html)"""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="nameService")
    def name_service(self) -> pulumi.Input[str]:
        """Backend service's parameter name."""
        return pulumi.get(self, "name_service")

    @name_service.setter
    def name_service(self, value: pulumi.Input[str]):
        pulumi.set(self, "name_service", value)
| 34.195548
| 303
| 0.619555
| 2,541
| 21,509
| 5.105077
| 0.064935
| 0.129741
| 0.14354
| 0.054194
| 0.854379
| 0.783688
| 0.75555
| 0.735816
| 0.691335
| 0.651634
| 0
| 0.003178
| 0.253894
| 21,509
| 628
| 304
| 34.25
| 0.805147
| 0.27184
| 0
| 0.652632
| 1
| 0
| 0.07533
| 0.011462
| 0
| 0
| 0
| 0
| 0
| 1
| 0.215789
| false
| 0
| 0.013158
| 0.007895
| 0.347368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bab77f5f7721bb6785bf9a4938dc9a073de96d3a
| 16,817
|
py
|
Python
|
test/unit/testBatch.py
|
Ghost93/python-sdk
|
24fffc1d7cfb53e5f8548c6afa05f6706411a8ba
|
[
"MIT"
] | 2
|
2021-05-07T16:07:55.000Z
|
2021-12-07T01:52:33.000Z
|
test/unit/testBatch.py
|
Ghost93/python-sdk
|
24fffc1d7cfb53e5f8548c6afa05f6706411a8ba
|
[
"MIT"
] | 5
|
2018-02-13T16:11:51.000Z
|
2021-06-25T10:45:36.000Z
|
test/unit/testBatch.py
|
Ghost93/python-sdk
|
24fffc1d7cfb53e5f8548c6afa05f6706411a8ba
|
[
"MIT"
] | 8
|
2018-06-05T19:20:10.000Z
|
2021-12-07T02:35:34.000Z
|
import sys
import os
import unittest
sys.path.append(os.path.abspath('.'))
import paymentrails.configuration
import paymentrails.batch
from mock import MagicMock, Mock, patch
import paymentrails.exceptions.notFoundException
import paymentrails.exceptions.invalidFieldException
def fake_find(batchId):
    """Stub for Batch.find: validate the id and return a canned batch payload.

    :param batchId: batch identifier; must be non-None and start with "B".
    :raises InvalidFieldException: if batchId is None.
    :raises NotFoundException: if batchId does not look like a batch id.
    """
    # `is None` instead of `== None` (PEP 8); startswith replaces the slice test.
    if batchId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Batch id cannot be None")
    if not batchId.startswith("B"):
        raise paymentrails.exceptions.notFoundException.NotFoundException("Batch id is invalid")
    return {"ok":"true","batch":{"id":"B-912Q61G0BRVGC","status":"open","amount":"999.00","totalPayments":1,"currency":"USD","description":"Weekly Payouts on 2017-4-4","sentAt":"null","completedAt":"null","createdAt":"2017-05-04T19:19:38.049Z","updatedAt":"2017-05-15T16:38:21.552Z","payments":{"payments":[{"id":"P-91XQ0U0B1RW5M","recipient":{"id":"R-91XPYX3V2MM1G","referenceId":"jsmith@exafmple.com","email":"jsmith@exafmple.com","name":"John Smith","status":"archived","countryCode":"null"},"method":"paypal","methodDisplay":"PayPal","status":"pending","sourceAmount":"999.00","targetAmount":"100.10","isSupplyPayment":"false","memo":"Do something amazing!","fees":"0.00","recipientFees":"0.00","exchangeRate":"1.000000","processedAt":"null","merchantFees":"0.00","sourceCurrency":"USD","sourceCurrencyName":"US Dollar","targetCurrency":"USD","targetCurrencyName":"US Dollar","compliance":{"status":"pending","checkedAt":"null"}}],"meta":{"page":1,"pages":1,"records":1}}}}
def fake_update(batchId, body):
    """Stub for Batch.update: validate inputs and return a canned success.

    :param batchId: batch identifier; must be non-None and start with "B".
    :param body: update payload; must be non-None.
    :raises InvalidFieldException: if batchId or body is None.
    :raises NotFoundException: if batchId does not look like a batch id.
    """
    # `is None` instead of `== None` (PEP 8); startswith replaces the slice test.
    if batchId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Batch id cannot be None")
    if not batchId.startswith("B"):
        raise paymentrails.exceptions.notFoundException.NotFoundException("Batch id is invalid")
    if body is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Body cannot be None")
    return {"ok": "true", "object": "updated"}
def fake_create(body):
    """Stub for Batch.create: validate the payload and return a canned success.

    :param body: creation payload; must be non-None.
    :raises InvalidFieldException: if body is None.
    """
    # `is None` instead of `== None` (PEP 8).
    if body is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Body is invalid")
    return {"ok": "true"}
def fake_delete(batchId):
    """Stub for Batch.delete: validate the id and return a canned success.

    :param batchId: batch identifier; must be non-None and start with "B".
    :raises InvalidFieldException: if batchId is None.
    :raises NotFoundException: if batchId does not look like a batch id.
    """
    # `is None` instead of `== None` (PEP 8); startswith replaces the slice test.
    if batchId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Batch id cannot be None")
    if not batchId.startswith("B"):
        raise paymentrails.exceptions.notFoundException.NotFoundException("Batch id is invalid")
    return {"ok": "true", "object": "deleted"}
def fake_search(page, pageSize, term):
    """Stub for Batch.search: validate the term and return a canned result page.

    :param page: page number (ignored by the stub).
    :param pageSize: page size (ignored by the stub).
    :param term: search term; must be non-None.
    :raises InvalidFieldException: if term is None.
    """
    # `is None` instead of `== None` (PEP 8).
    if term is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Term cannot be None")
    return {"ok":"true","batches":[{"id":"B-91XQ40VT5HF18","status":"open","amount":"900.90","totalPayments":1,"currency":"USD","description":"Weekly Payouts on 2017-4-8","sentAt":"null","completedAt":"null","createdAt":"2017-05-08T18:30:44.905Z","updatedAt":"2017-05-12T18:39:06.125Z"}],"meta":{"page":1,"pages":1,"records":1}}
def fake_summary(batchId):
    """Stub for Batch.summary: validate the id and return a canned summary.

    :param batchId: batch identifier; must be non-None and start with "B".
    :raises InvalidFieldException: if batchId is None.
    :raises NotFoundException: if batchId does not look like a batch id.
    """
    # `is None` instead of `== None` (PEP 8); startswith replaces the slice test.
    if batchId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Batch id cannot be None")
    if not batchId.startswith("B"):
        raise paymentrails.exceptions.notFoundException.NotFoundException("Batch id is invalid")
    return {"ok":"true","batchSummary":{"id":"B-91XQ40VT5HF18","serverTime":"2017-05-16T13:34:52.026Z","status":"open","currency":"USD","description":"Weekly Payouts on 2017-4-8","sentAt":"null","completedAt":"null","createdAt":"2017-05-08T18:30:44.905Z","processed_by":"API","updatedAt":"2017-05-12T18:39:06.125Z","quoteExpiredAt":"null","errors":[],"methods":{"paypal":{"count":1,"value":900.9,"fees":0,"recipientFees":0,"merchantFees":0,"net":900.9,"accountType":"Gateway","displayName":"PayPal"},"bank-transfer":{"count":0,"value":0,"fees":0,"recipientFees":0,"merchantFees":0,"net":0,"accountType":"PaymentRails","displayName":"Bank Transfer"}},"PaymentRailsTotal":{"count":0,"value":0,"fees":0,"recipientFees":0,"merchantFees":0,"net":0},"GatewayTotal":{"count":1,"value":900.9,"fees":0,"recipientFees":0,"merchantFees":0,"net":900.9},"total":{"count":1,"value":900.9,"fees":0,"recipientFees":0,"merchantFees":0,"net":900.9},"merchantBalances":{"GatewayTotal":0,"PaymentRailsTotal":10000},"enoughFunds":"true"}}
def fake_process_batch(batchId):
    """Stub for Batch.process_batch: validate the id and return a canned batch.

    :param batchId: batch identifier; must be non-None and start with "B".
    :raises InvalidFieldException: if batchId is None.
    :raises NotFoundException: if batchId does not look like a batch id.
    """
    # `is None` instead of `== None` (PEP 8); startswith replaces the slice test.
    if batchId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Batch id cannot be None")
    if not batchId.startswith("B"):
        raise paymentrails.exceptions.notFoundException.NotFoundException("Batch id is invalid")
    return {"ok":"true","batch":{"id":"B-91XQ40VT5HF18","status":"processing","amount":"900.90","totalPayments":1,"currency":"USD","description":"Weekly Payouts on 2017-4-8","sentAt":"2017-05-16T13:41:56.149Z","completedAt":"null","createdAt":"2017-05-08T18:30:44.905Z","updatedAt":"2017-05-16T13:41:56.150Z"}}
def fake_generate_quote(batchId):
    """Stub for Batch.generate_quote: validate the id and return a canned batch.

    :param batchId: batch identifier; must be non-None and start with "B".
    :raises InvalidFieldException: if batchId is None.
    :raises NotFoundException: if batchId does not look like a batch id.
    """
    # `is None` instead of `== None` (PEP 8); startswith replaces the slice test.
    if batchId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Batch id cannot be None")
    if not batchId.startswith("B"):
        raise paymentrails.exceptions.notFoundException.NotFoundException("Batch id is invalid")
    return {"ok":"true","batch":{"id":"B-91XQ40VT5HF18","status":"open","amount":"900.90","totalPayments":"1","currency":"USD","description":"Weekly Payouts on 2017-4-8","sentAt":"null","completedAt":"null","createdAt":"2017-05-08T18:30:44.905Z","updatedAt":"2017-05-16T13:40:08.098Z"}}
class TestBatch(unittest.TestCase):
public_key = ("public_key")
private_key = ("private_key")
@patch('paymentrails.batch.Batch.find',fake_find)
def test_retrieve_batch(self):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.find("B-912Q61G0BRVGC")
status = {"ok":"true","batch":{"id":"B-912Q61G0BRVGC","status":"open","amount":"999.00","totalPayments":1,"currency":"USD","description":"Weekly Payouts on 2017-4-4","sentAt":"null","completedAt":"null","createdAt":"2017-05-04T19:19:38.049Z","updatedAt":"2017-05-15T16:38:21.552Z","payments":{"payments":[{"id":"P-91XQ0U0B1RW5M","recipient":{"id":"R-91XPYX3V2MM1G","referenceId":"jsmith@exafmple.com","email":"jsmith@exafmple.com","name":"John Smith","status":"archived","countryCode":"null"},"method":"paypal","methodDisplay":"PayPal","status":"pending","sourceAmount":"999.00","targetAmount":"100.10","isSupplyPayment":"false","memo":"Do something amazing!","fees":"0.00","recipientFees":"0.00","exchangeRate":"1.000000","processedAt":"null","merchantFees":"0.00","sourceCurrency":"USD","sourceCurrencyName":"US Dollar","targetCurrency":"USD","targetCurrencyName":"US Dollar","compliance":{"status":"pending","checkedAt":"null"}}],"meta":{"page":1,"pages":1,"records":1}}}}
self.assertEqual(response, status)
@patch('paymentrails.batch.Batch.find',fake_find)
def test_retrieve_batch_InvalidBatchId(self):
with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.find("dddd")
@patch('paymentrails.batch.Batch.find',fake_find)
def test_retrieve_batch_None(self):
with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.find(None)
@patch('paymentrails.batch.Batch.update',fake_update)
def test_update_batch(self):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
body = {"update_payments": [
{"id": "P-91XQ0U0B1RW5M", "sourceAmount": 999}]}
response = paymentrails.batch.Batch.update("B-912Q61G0BRVGC", body)
status = {"ok": "true", "object": "updated"}
self.assertEqual(response, status)
@patch('paymentrails.batch.Batch.update',fake_update)
def test_update_batch_InvalidBatchId(self):
with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
body = {"update_payments": [
{"id": "P-91XQ0U0B1RW5M", "sourceAmount": 999}]}
response = paymentrails.batch.Batch.update("ddddd",body)
@patch('paymentrails.batch.Batch.update',fake_update)
def test_update_batch_None_Batch(self):
with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
body = {"update_payments": [
{"id": "P-91XQ0U0B1RW5M", "sourceAmount": 999}]}
response = paymentrails.batch.Batch.update(None,body)
@patch('paymentrails.batch.Batch.update',fake_update)
def test_update_batch_None_Body(self):
with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.update("B-ddddd",None)
@patch('paymentrails.batch.Batch.delete',fake_delete)
def test_delete_batch(self):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.delete("B-912Q61G0BRVGC")
status = {"ok": "true", "object": "deleted"}
self.assertEqual(response, status)
@patch('paymentrails.batch.Batch.delete',fake_delete)
def test_delete_batch_InvalidBatchId(self):
with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.delete("ddddd")
@patch('paymentrails.batch.Batch.delete',fake_delete)
def test_delete_batch_None_Batch(self):
with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.delete(None)
@patch('paymentrails.batch.Batch.search',fake_search)
def test_list_allBatchesWithQueries(self):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.search(1, 10, "f18")
status = {"ok":"true","batches":[{"id":"B-91XQ40VT5HF18","status":"open","amount":"900.90","totalPayments":1,"currency":"USD","description":"Weekly Payouts on 2017-4-8","sentAt":"null","completedAt":"null","createdAt":"2017-05-08T18:30:44.905Z","updatedAt":"2017-05-12T18:39:06.125Z"}],"meta":{"page":1,"pages":1,"records":1}}
self.assertEqual(response, status)
@patch('paymentrails.batch.Batch.search',fake_search)
def test_list_allBatchesWithNone(self):
with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.search(1, 10, None)
@patch('paymentrails.batch.Batch.summary',fake_summary)
def test_batch_summary(self):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.summary("B-912Q61G0BRVGC")
status = {"ok":"true","batchSummary":{"id":"B-91XQ40VT5HF18","serverTime":"2017-05-16T13:34:52.026Z","status":"open","currency":"USD","description":"Weekly Payouts on 2017-4-8","sentAt":"null","completedAt":"null","createdAt":"2017-05-08T18:30:44.905Z","processed_by":"API","updatedAt":"2017-05-12T18:39:06.125Z","quoteExpiredAt":"null","errors":[],"methods":{"paypal":{"count":1,"value":900.9,"fees":0,"recipientFees":0,"merchantFees":0,"net":900.9,"accountType":"Gateway","displayName":"PayPal"},"bank-transfer":{"count":0,"value":0,"fees":0,"recipientFees":0,"merchantFees":0,"net":0,"accountType":"PaymentRails","displayName":"Bank Transfer"}},"PaymentRailsTotal":{"count":0,"value":0,"fees":0,"recipientFees":0,"merchantFees":0,"net":0},"GatewayTotal":{"count":1,"value":900.9,"fees":0,"recipientFees":0,"merchantFees":0,"net":900.9},"total":{"count":1,"value":900.9,"fees":0,"recipientFees":0,"merchantFees":0,"net":900.9},"merchantBalances":{"GatewayTotal":0,"PaymentRailsTotal":10000},"enoughFunds":"true"}}
self.assertEqual(response, status)
@patch('paymentrails.batch.Batch.summary',fake_summary)
def test_batch_summary_InvalidBatchId(self):
with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.summary("dddd")
@patch('paymentrails.batch.Batch.summary',fake_summary)
def test_batch_summary_None(self):
with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.summary(None)
@patch('paymentrails.batch.Batch.create',fake_create)
@patch('paymentrails.batch.Batch.generate_quote',fake_generate_quote)
@patch('paymentrails.batch.Batch.process_batch',fake_process_batch)
def test_create_batch(self):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
body = {"payments": [{"recipient": {"id": "R-91XQ0PJH39U54"},
"sourceAmount": "65", "memo": "", "sourceCurrency": "CAD"}]}
response = paymentrails.batch.Batch.create(body)
status = {"ok": "true"}
self.assertEqual(response,status)
batchId = 'B-91XQ40VT5HF18'
response = paymentrails.batch.Batch.generate_quote(batchId)
status = {"ok":"true","batch":{"id":"B-91XQ40VT5HF18","status":"open","amount":"900.90","totalPayments":"1","currency":"USD","description":"Weekly Payouts on 2017-4-8","sentAt":"null","completedAt":"null","createdAt":"2017-05-08T18:30:44.905Z","updatedAt":"2017-05-16T13:40:08.098Z"}}
self.assertEqual(response, status)
response = paymentrails.batch.Batch.process_batch(batchId)
status = {"ok":"true","batch":{"id":"B-91XQ40VT5HF18","status":"processing","amount":"900.90","totalPayments":1,"currency":"USD","description":"Weekly Payouts on 2017-4-8","sentAt":"2017-05-16T13:41:56.149Z","completedAt":"null","createdAt":"2017-05-08T18:30:44.905Z","updatedAt":"2017-05-16T13:41:56.150Z"}}
self.assertEqual(response, status)
@patch('paymentrails.batch.Batch.create',fake_create)
def test_create_batch_None(self):
with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
paymentrails.configuration.Configuration.set_public_key(TestBatch.public_key)
paymentrails.configuration.Configuration.set_private_key(TestBatch.private_key)
response = paymentrails.batch.Batch.create(None)
if __name__ == '__main__':
unittest.main()
| 78.953052
| 1,023
| 0.72284
| 1,910
| 16,817
| 6.259162
| 0.103665
| 0.055458
| 0.069929
| 0.116604
| 0.929653
| 0.902719
| 0.891928
| 0.886993
| 0.875031
| 0.8578
| 0
| 0.057686
| 0.108343
| 16,817
| 212
| 1,024
| 79.325472
| 0.73958
| 0
| 0
| 0.551351
| 0
| 0
| 0.32039
| 0.069989
| 0
| 0
| 0
| 0
| 0.102703
| 1
| 0.135135
| false
| 0
| 0.043243
| 0
| 0.237838
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2456555c1115ce83a95fd87ce08e724b4772a844
| 167
|
py
|
Python
|
test/matrix/__init__.py
|
thatch/BitSwanPump
|
98a5b8d09f9b59d5361611cee0bd45e7b4c69e3f
|
[
"BSD-3-Clause"
] | 17
|
2019-02-14T09:26:03.000Z
|
2022-03-11T09:23:52.000Z
|
test/matrix/__init__.py
|
thatch/BitSwanPump
|
98a5b8d09f9b59d5361611cee0bd45e7b4c69e3f
|
[
"BSD-3-Clause"
] | 91
|
2019-05-06T18:59:02.000Z
|
2022-01-11T06:22:32.000Z
|
test/matrix/__init__.py
|
thatch/BitSwanPump
|
98a5b8d09f9b59d5361611cee0bd45e7b4c69e3f
|
[
"BSD-3-Clause"
] | 10
|
2019-04-23T08:48:58.000Z
|
2022-02-13T14:24:28.000Z
|
from .test_matrix import *
from .test_named_matrix import *
from .test_geo_matrix import *
from .test_session_matrix import *
from .test_time_window_matrix import *
| 20.875
| 38
| 0.808383
| 25
| 167
| 5
| 0.36
| 0.32
| 0.512
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131737
| 167
| 7
| 39
| 23.857143
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
24623bea8531922c66ddc6152c8be49452608ad6
| 205
|
py
|
Python
|
POP1/mocks/practical-three/test_question6.py
|
silvafj/BBK-MSCCS-2017-18
|
d97b0f8e7434d19a1a4006989c32c4c1deb93842
|
[
"MIT"
] | 1
|
2021-12-29T19:38:56.000Z
|
2021-12-29T19:38:56.000Z
|
POP1/mocks/practical-three/test_question6.py
|
silvafj/BBK-MSCCS-2017-18
|
d97b0f8e7434d19a1a4006989c32c4c1deb93842
|
[
"MIT"
] | null | null | null |
POP1/mocks/practical-three/test_question6.py
|
silvafj/BBK-MSCCS-2017-18
|
d97b0f8e7434d19a1a4006989c32c4c1deb93842
|
[
"MIT"
] | 2
|
2021-04-08T22:58:03.000Z
|
2021-04-09T01:16:51.000Z
|
from question6 import cows, bulls
def test_cows():
assert 0 == cows(1234, 9876)
assert 2 == cows(1234, 9294)
def test_bulls():
assert 0 == bulls(1234, 9876)
assert 2 == bulls(1234, 9912)
| 20.5
| 33
| 0.639024
| 31
| 205
| 4.16129
| 0.451613
| 0.108527
| 0.217054
| 0.232558
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.234177
| 0.229268
| 205
| 9
| 34
| 22.777778
| 0.582278
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.571429
| 1
| 0.285714
| true
| 0
| 0.142857
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cee619dab97a0ea5c5567c0bcdf98021f71b4ffe
| 4,065
|
py
|
Python
|
tests/unit/math/distance/test_numpy.py
|
startakovsky/docarray
|
78dd3199d25b3e533cd09643b97359783c193397
|
[
"Apache-2.0"
] | 591
|
2022-01-09T14:39:59.000Z
|
2022-03-31T13:19:39.000Z
|
tests/unit/math/distance/test_numpy.py
|
startakovsky/docarray
|
78dd3199d25b3e533cd09643b97359783c193397
|
[
"Apache-2.0"
] | 210
|
2022-01-10T07:59:29.000Z
|
2022-03-31T14:49:18.000Z
|
tests/unit/math/distance/test_numpy.py
|
startakovsky/docarray
|
78dd3199d25b3e533cd09643b97359783c193397
|
[
"Apache-2.0"
] | 40
|
2022-01-09T14:52:20.000Z
|
2022-03-31T07:59:45.000Z
|
import numpy as np
import pytest
from scipy.sparse import csr_matrix
from docarray.math.distance.numpy import (
cosine,
euclidean,
sparse_cosine,
sparse_euclidean,
sparse_sqeuclidean,
sqeuclidean,
)
@pytest.mark.parametrize(
'x_mat, y_mat, result',
(
(
np.array([[1, 2, 3], [4, 5, 6]]),
np.array([[1, 2, 3], [4, 5, 6]]),
np.array(
[[0.00000000e00, 2.53681537e-02], [2.53681537e-02, 2.22044605e-16]]
),
),
(np.array([[1, 2, 3]]), np.array([[1, 2, 3]]), np.array([[0]])),
(np.array([[0, 0, 0]]), np.array([[0, 0, 0]]), np.array([[0]])),
(np.array([[1, 2, 3]]), np.array([[19, 53, 201]]), np.array([[0.06788693]])),
),
)
def test_cosine(x_mat, y_mat, result):
np.testing.assert_almost_equal(cosine(x_mat, y_mat), result, decimal=3)
@pytest.mark.parametrize(
'x_mat, y_mat, result',
(
(
csr_matrix([[1, 2, 3], [4, 5, 6]]),
csr_matrix([[1, 2, 3], [4, 5, 6]]),
np.array(
[[0.00000000e00, 2.53681537e-02], [2.53681537e-02, 2.22044605e-16]]
),
),
(csr_matrix([[1, 2, 3]]), csr_matrix([[1, 2, 3]]), np.array([[0]])),
(csr_matrix([[0, 0, 0]]), csr_matrix([[0, 0, 0]]), np.array([[np.nan]])),
(
csr_matrix([[1, 2, 3]]),
csr_matrix([[19, 53, 201]]),
np.array([[0.06788693]]),
),
),
)
def test_sparse_cosine(x_mat, y_mat, result):
np.testing.assert_almost_equal(sparse_cosine(x_mat, y_mat), result, decimal=3)
@pytest.mark.parametrize(
'x_mat, y_mat, result',
(
(
np.array([[1, 2, 3], [4, 5, 6]]),
np.array([[1, 2, 3], [4, 5, 6]]),
np.array([[0, 27], [27, 0]]),
),
(np.array([[1, 2, 3]]), np.array([[1, 2, 3]]), np.array([[0]])),
(np.array([[0, 0, 0]]), np.array([[0, 0, 0]]), np.array([[0]])),
(np.array([[1, 2, 3]]), np.array([[19, 53, 201]]), np.array([[42129]])),
),
)
def test_sqeuclidean(x_mat, y_mat, result):
np.testing.assert_almost_equal(sqeuclidean(x_mat, y_mat), result, decimal=3)
@pytest.mark.parametrize(
'x_mat, y_mat, result',
(
(
csr_matrix([[1, 2, 3], [4, 5, 6]]),
csr_matrix([[1, 2, 3], [4, 5, 6]]),
np.array([[0, 27], [27, 0]]),
),
(csr_matrix([[1, 2, 3]]), csr_matrix([[1, 2, 3]]), np.array([[0]])),
(csr_matrix([[0, 0, 0]]), csr_matrix([[0, 0, 0]]), np.array([[0]])),
(csr_matrix([[1, 2, 3]]), csr_matrix([[19, 53, 201]]), np.array([[42129]])),
),
)
def test_sparse_sqeuclidean(x_mat, y_mat, result):
np.testing.assert_almost_equal(sparse_sqeuclidean(x_mat, y_mat), result, decimal=3)
@pytest.mark.parametrize(
'x_mat, y_mat, result',
(
(
np.array([[1, 2, 3], [4, 5, 6]]),
np.array([[1, 2, 3], [4, 5, 6]]),
np.array([[0, 5.19615242], [5.19615242, 0]]),
),
(np.array([[1, 2, 3]]), np.array([[1, 2, 3]]), np.array([[0]])),
(np.array([[0, 0, 0]]), np.array([[0, 0, 0]]), np.array([[0]])),
(np.array([[1, 2, 3]]), np.array([[19, 53, 201]]), np.array([[205.2535018]])),
),
)
def test_euclidean(x_mat, y_mat, result):
np.testing.assert_almost_equal(euclidean(x_mat, y_mat), result, decimal=3)
@pytest.mark.parametrize(
'x_mat, y_mat, result',
(
(
csr_matrix([[1, 2, 3], [4, 5, 6]]),
csr_matrix([[1, 2, 3], [4, 5, 6]]),
np.array([[0, 5.19615242], [5.19615242, 0]]),
),
(csr_matrix([[1, 2, 3]]), csr_matrix([[1, 2, 3]]), np.array([[0]])),
(csr_matrix([[0, 0, 0]]), csr_matrix([[0, 0, 0]]), np.array([[0]])),
(
csr_matrix([[1, 2, 3]]),
csr_matrix([[19, 53, 201]]),
np.array([[205.2535018]]),
),
),
)
def test_sparse_euclidean(x_mat, y_mat, result):
np.testing.assert_almost_equal(sparse_euclidean(x_mat, y_mat), result, decimal=3)
| 32.007874
| 87
| 0.487577
| 596
| 4,065
| 3.177852
| 0.087248
| 0.177402
| 0.047518
| 0.07603
| 0.911827
| 0.911827
| 0.911827
| 0.902323
| 0.868532
| 0.833685
| 0
| 0.137294
| 0.267159
| 4,065
| 126
| 88
| 32.261905
| 0.498489
| 0
| 0
| 0.557522
| 0
| 0
| 0.02952
| 0
| 0
| 0
| 0
| 0
| 0.053097
| 1
| 0.053097
| false
| 0
| 0.035398
| 0
| 0.088496
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
23597ef2c16a8d0b6f9a050c0bc516757ae1bf51
| 11,243
|
py
|
Python
|
Chapter05/01-chapter-content/color_map_custom_values.py
|
yaojh01/Mastering-OpenCV-4-with-Python
|
e8f83e314b8ed638edb6515967cfb24361b787af
|
[
"MIT"
] | 2
|
2021-06-29T22:00:47.000Z
|
2021-06-30T02:46:19.000Z
|
Chapter05/01-chapter-content/color_map_custom_values.py
|
yaojh01/Mastering-OpenCV-4-with-Python
|
e8f83e314b8ed638edb6515967cfb24361b787af
|
[
"MIT"
] | null | null | null |
Chapter05/01-chapter-content/color_map_custom_values.py
|
yaojh01/Mastering-OpenCV-4-with-Python
|
e8f83e314b8ed638edb6515967cfb24361b787af
|
[
"MIT"
] | 1
|
2019-10-03T20:34:43.000Z
|
2019-10-03T20:34:43.000Z
|
"""
Example for testing custom colors maps in OpenCV providing all values
"""
# Import required packages:
import cv2
import numpy as np
import matplotlib.pyplot as plt
def apply_custom_colormap_values(im_gray):
"""Applies a custom color map using cv2.applyColorMap()"""
# Create the LUT:
lut = np.zeros((256, 1, 3), dtype=np.uint8)
lut[:, 0, 0] = [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 253, 251, 249, 247, 245, 242, 241, 238, 237, 235, 233, 231, 229, 227, 225,
223, 221, 219, 217, 215, 213, 211, 209, 207, 205, 203, 201, 199, 197, 195, 193, 191, 189, 187, 185,
183, 181, 179, 177, 175, 173, 171, 169, 167, 165, 163, 161, 159, 157, 155, 153, 151, 149, 147, 145,
143, 141, 138, 136, 134, 132, 131, 129, 126, 125, 122, 121, 118, 116, 115, 113, 111, 109, 107, 105,
102, 100, 98, 97, 94, 93, 91, 89, 87, 84, 83, 81, 79, 77, 75, 73, 70, 68, 66, 64, 63, 61, 59, 57,
54, 52, 51, 49, 47, 44, 42, 40, 39, 37, 34, 33, 31, 29, 27, 25, 22, 20, 18, 17, 14, 13, 11, 9, 6, 4,
2, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10]
lut[:, 0, 1] = [200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 198, 196, 194, 192,
190, 188, 186, 184, 182, 180, 178, 176, 174, 171, 169, 167, 165, 163, 161, 159, 157, 155, 153, 151,
149, 147, 145, 143, 141, 139, 137, 135, 133, 131, 129, 127, 125, 123, 121, 119, 117, 115, 113, 111,
109, 107, 105, 103, 101, 99, 97, 95, 93, 91, 89, 87, 85, 83, 82, 80, 78, 76, 74, 72, 70, 68, 66, 64,
62, 60, 58, 56, 54, 52, 50, 48, 46, 44, 42, 40, 38, 36, 34, 32, 30, 28, 26, 24, 22, 20, 18, 16, 14,
12, 10, 8, 6, 4, 2, 0]
lut[:, 0, 2] = [195, 194, 193, 191, 190, 189, 188, 187, 186, 185, 184, 183, 182, 181, 179, 178, 177, 176, 175, 174,
173, 172, 171, 170, 169, 167, 166, 165, 164, 163, 162, 161, 160, 159, 158, 157, 155, 154, 153, 152,
151, 150, 149, 148, 147, 146, 145, 143, 142, 141, 140, 139, 138, 137, 136, 135, 134, 133, 131, 130,
129, 128, 127, 126, 125, 124, 123, 122, 121, 120, 119, 118, 117, 116, 115, 114, 113, 112, 111, 110,
109, 108, 107, 106, 105, 104, 103, 102, 101, 95, 99, 98, 97, 96, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95]
# Apply color map using cv2.applyColorMap()
im_color = cv2.applyColorMap(im_gray, lut)
return im_color
def apply_custom_colormap_values2(im_gray):
"""Applies a custom color map using cv2.LUT()"""
# Create the LUT:
lut = np.zeros((256, 3), dtype=np.uint8)
lut[:, 0] = [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 253, 251, 249, 247, 245, 242, 241, 238, 237, 235, 233, 231, 229, 227, 225,
223, 221, 219, 217, 215, 213, 211, 209, 207, 205, 203, 201, 199, 197, 195, 193, 191, 189, 187, 185,
183, 181, 179, 177, 175, 173, 171, 169, 167, 165, 163, 161, 159, 157, 155, 153, 151, 149, 147, 145,
143, 141, 138, 136, 134, 132, 131, 129, 126, 125, 122, 121, 118, 116, 115, 113, 111, 109, 107, 105,
102, 100, 98, 97, 94, 93, 91, 89, 87, 84, 83, 81, 79, 77, 75, 73, 70, 68, 66, 64, 63, 61, 59, 57,
54, 52, 51, 49, 47, 44, 42, 40, 39, 37, 34, 33, 31, 29, 27, 25, 22, 20, 18, 17, 14, 13, 11, 9, 6, 4,
2, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10]
lut[:, 1] = [200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 198, 196, 194, 192,
190, 188, 186, 184, 182, 180, 178, 176, 174, 171, 169, 167, 165, 163, 161, 159, 157, 155, 153, 151,
149, 147, 145, 143, 141, 139, 137, 135, 133, 131, 129, 127, 125, 123, 121, 119, 117, 115, 113, 111,
109, 107, 105, 103, 101, 99, 97, 95, 93, 91, 89, 87, 85, 83, 82, 80, 78, 76, 74, 72, 70, 68, 66, 64,
62, 60, 58, 56, 54, 52, 50, 48, 46, 44, 42, 40, 38, 36, 34, 32, 30, 28, 26, 24, 22, 20, 18, 16, 14,
12, 10, 8, 6, 4, 2, 0]
lut[:, 2] = [195, 194, 193, 191, 190, 189, 188, 187, 186, 185, 184, 183, 182, 181, 179, 178, 177, 176, 175, 174,
173, 172, 171, 170, 169, 167, 166, 165, 164, 163, 162, 161, 160, 159, 158, 157, 155, 154, 153, 152,
151, 150, 149, 148, 147, 146, 145, 143, 142, 141, 140, 139, 138, 137, 136, 135, 134, 133, 131, 130,
129, 128, 127, 126, 125, 124, 123, 122, 121, 120, 119, 118, 117, 116, 115, 114, 113, 112, 111, 110,
109, 108, 107, 106, 105, 104, 103, 102, 101, 95, 99, 98, 97, 96, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
95, 95, 95]
# Apply color map using cv2.LUT():
s0, s1 = im_gray.shape
im_color = np.empty(shape=(s0, s1, 3), dtype=np.uint8)
for i in range(3):
im_color[..., i] = cv2.LUT(im_gray, lut[:, i])
return im_color
def apply_rand_custom_colormap_values(im_gray):
"""Applies a random color map using cv2.applyColorMap()"""
# Create random LUT
lut = np.random.randint(255, size=(256, 1, 3), dtype=np.uint8)
# Apply color map using cv2.applyColorMap()
im_color = cv2.applyColorMap(im_gray, lut)
return im_color
def apply_rand_custom_colormap_values2(im_gray):
"""Applies a random color map using cv2.LUT()"""
# Create random LUT
lut = np.random.randint(255, size=(256, 3), dtype=np.uint8)
# Apply color map using cv2.LUT():
s0, s1 = im_gray.shape
im_color = np.empty(shape=(s0, s1, 3), dtype=np.uint8)
for i in range(3):
im_color[..., i] = cv2.LUT(im_gray, lut[:, i])
return im_color
def show_with_matplotlib(color_img, title, pos):
"""Shows an image using matplotlib capabilities"""
# Convert BGR image to RGB
img_RGB = color_img[:, :, ::-1]
ax = plt.subplot(1, 5, pos)
plt.imshow(img_RGB)
plt.title(title)
plt.axis('off')
# We load the image using cv2.imread() and using 'cv2.IMREAD_GRAYSCALE' argument:
gray_img = cv2.imread('shades.png', cv2.IMREAD_GRAYSCALE)
# create a figure() object with appropriate size and title:
plt.figure(figsize=(12, 2))
plt.suptitle("Custom colormaps providing all values", fontsize=14, fontweight='bold')
# Show image:
show_with_matplotlib(cv2.cvtColor(gray_img, cv2.COLOR_GRAY2BGR), "gray", 1)
# Apply the custom color maps to the grayscale image:
custom_rand_1 = apply_rand_custom_colormap_values(gray_img)
custom_rand_2 = apply_rand_custom_colormap_values2(gray_img)
custom_values_1 = apply_custom_colormap_values(gray_img)
custom_values_2 = apply_custom_colormap_values2(gray_img)
# Display all the resulting images:
show_with_matplotlib(custom_rand_1, "cv2.applyColorMap()", 2)
show_with_matplotlib(custom_rand_2, "cv2.LUT()", 3)
show_with_matplotlib(custom_values_1, "cv2.applyColorMap()", 4)
show_with_matplotlib(custom_values_2, "cv2.LUT()", 5)
# Show the created image:
plt.show()
| 63.880682
| 120
| 0.532598
| 1,995
| 11,243
| 2.956892
| 0.149875
| 0.218342
| 0.325479
| 0.43126
| 0.863875
| 0.831666
| 0.80912
| 0.79149
| 0.78454
| 0.755721
| 0
| 0.50859
| 0.295917
| 11,243
| 175
| 121
| 64.245714
| 0.236609
| 0.074357
| 0
| 0.694915
| 0
| 0
| 0.011005
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042373
| false
| 0
| 0.025424
| 0
| 0.101695
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
88e291438e08cfb2c98f2fa5476c02df3a65ada6
| 10,491
|
py
|
Python
|
Algo_Research/Moving_Average.py
|
candypantsnola/QTS_Research
|
d62ecb1ecc5d5429f58b9f566cdd052e491bae28
|
[
"MIT"
] | 20
|
2017-03-05T22:15:29.000Z
|
2022-02-18T11:45:50.000Z
|
Algo_Research/Moving_Average.py
|
candypantsnola/QTS_Research
|
d62ecb1ecc5d5429f58b9f566cdd052e491bae28
|
[
"MIT"
] | null | null | null |
Algo_Research/Moving_Average.py
|
candypantsnola/QTS_Research
|
d62ecb1ecc5d5429f58b9f566cdd052e491bae28
|
[
"MIT"
] | 14
|
2017-03-05T22:49:04.000Z
|
2022-01-02T21:34:19.000Z
|
# an exhaustive research on moving average strategy
import sys
import os
this_dir=os.path.dirname(__file__)
sys.path.append(this_dir+"\..\\")
from Back_Test import *
from Ticker_API import *
from Data_API import *
from Util import *
import Single_Algos
test_start = -2500
test_end = -1
Pricing_Database.lazy_update_data_period = 20
# Test 0 ---------------------------------
#get benchmark on all SP500 underlyers
cache = Cache()
#Cache.quandl_key = "Put your quandl key here if you want better download speed with Quandl"
sp500 = get_snp500()
sp500_by_sector = get_snp500_by_sector()
Pricing_Database.lazy_update_data = True # to use downloaded data in the past week.
count = 0
good_keys = []
for k in sp500:
try:
if k == "JEC":
raise Exception('Unadjusted ticker')
cache.get_ticker_data(k)
count += 1
good_keys.append(k)
except:
print( "skipped {}".format(k) )
pass
all_measures = []
all_measures_by_sector = []
all_measures_total ={}
for sector in sp500_by_sector:
sector_measures = {}
sector_counter = 0
for k in sp500_by_sector[sector]:
if k in good_keys:
print(k)
algo = back_test_single("portfolio.buy(ticker)",test_start,test_end,ticker=k)
temp_measure = algo.portfolio.get_measures()
sector_measures={ key:temp_measure[key]+sector_measures.get(key,0) for key in temp_measure}
all_measures_total={ key:temp_measure[key]+all_measures_total.get(key,0) for key in temp_measure}
temp_measure['ticker'] = k
all_measures.append(temp_measure)
sector_counter += 1
sector_measures = {key:sector_measures[key]/sector_counter for key in sector_measures}
sector_measures['ticker'] = sector
all_measures_by_sector.append(sector_measures)
all_measures_total = {key:all_measures_total[key]/count for key in all_measures_total}
all_measures_total['ticker'] = "Avg_SP500"
list_for_csv = [all_measures_total]+all_measures_by_sector+all_measures
dict_array_to_csv(list_for_csv,"Bench_Mark_1.csv",fields=['ticker','return','volatility','draw_down','max_draw_down','sharpe'])
#
# Test 1 ---------------------------------
#run moving average on all SP500 underlyers
cache = Cache()
#Cache.quandl_key = "Put your quandl key here if you want better download speed with Quandl"
sp500 = get_snp500()
sp500_by_sector = get_snp500_by_sector()
Pricing_Database.lazy_update_data = True # to use downloaded data in the past week.
count = 0
good_keys = []
for k in sp500:
try:
if k == "JEC":
raise Exception('Unadjusted ticker')
cache.get_ticker_data(k)
count += 1
good_keys.append(k)
except:
print( "skipped {}".format(k) )
pass
all_measures = []
all_measures_by_sector = []
all_measures_total ={}
for sector in sp500_by_sector:
sector_measures = {}
sector_counter = 0
for k in sp500_by_sector[sector]:
if k in good_keys:
print(k)
algo = back_test_single(Single_Algos.algos["moving average"],test_start,test_end,ticker=k)
temp_measure = algo.portfolio.get_measures()
sector_measures={ key:temp_measure[key]+sector_measures.get(key,0) for key in temp_measure}
all_measures_total={ key:temp_measure[key]+all_measures_total.get(key,0) for key in temp_measure}
temp_measure['ticker'] = k
all_measures.append(temp_measure)
sector_counter += 1
sector_measures = {key:sector_measures[key]/sector_counter for key in sector_measures}
sector_measures['ticker'] = sector
all_measures_by_sector.append(sector_measures)
all_measures_total = {key:all_measures_total[key]/count for key in all_measures_total}
all_measures_total['ticker'] = "Avg_SP500"
list_for_csv = [all_measures_total]+all_measures_by_sector+all_measures
dict_array_to_csv(list_for_csv,"Moving_Average_Result_1.csv",fields=['ticker','return','volatility','draw_down','max_draw_down','sharpe'])
# # Test 2 ---------------------------------
# #run moving average with short selling on all SP500 underlyers
# cache = Cache()
# #Cache.quandl_key = "Put your quandl key here if you want better download speed with Quandl"
# sp500 = get_snp500()
# sp500_by_sector = get_snp500_by_sector()
#
# Pricing_Database.lazy_update_data = True # to use downloaded data in the past week.
# count = 0
# good_keys = []
# for k in sp500:
# try:
# if k == "JEC":
# raise Exception('Unadjusted ticker')
# cache.get_ticker_data(k)
# count += 1
# good_keys.append(k)
# except:
# print( "skipped {}".format(k) )
# pass
#
# all_measures = []
# all_measures_by_sector = []
# all_measures_total ={}
# for sector in sp500_by_sector:
# sector_measures = {}
# sector_counter = 0
# for k in sp500_by_sector[sector]:
# if k in good_keys:
# print(k)
# algo = back_test_single(Single_Algos.algos["moving average with short sell"],test_start,test_end,ticker=k)
# temp_measure = algo.portfolio.get_measures()
# sector_measures={ key:temp_measure[key]+sector_measures.get(key,0) for key in temp_measure}
# all_measures_total={ key:temp_measure[key]+all_measures_total.get(key,0) for key in temp_measure}
# temp_measure['ticker'] = k
# all_measures.append(temp_measure)
# sector_counter += 1
# sector_measures = {key:sector_measures[key]/sector_counter for key in sector_measures}
# sector_measures['ticker'] = sector
# all_measures_by_sector.append(sector_measures)
# all_measures_total = {key:all_measures_total[key]/count for key in all_measures_total}
# all_measures_total['ticker'] = "Avg_SP500"
#
# list_for_csv = [all_measures_total]+all_measures_by_sector+all_measures
# dict_array_to_csv(list_for_csv,"Moving_Average_With_Short_Sell_Result_1.csv",fields=['ticker','return','volatility','draw_down','sharpe'])
# Test 3 ---------------------------------
#run moving average with support price signal on all SP500 underlyers
cache = Cache()
#Cache.quandl_key = "Put your quandl key here if you want better download speed with Quandl"
sp500 = get_snp500()
sp500_by_sector = get_snp500_by_sector()
Pricing_Database.lazy_update_data = True # to use downloaded data in the past week.
count = 0
good_keys = []
for k in sp500:
try:
if k == "JEC":
raise Exception('Unadjusted ticker')
cache.get_ticker_data(k)
count += 1
good_keys.append(k)
except:
print( "skipped {}".format(k) )
pass
all_measures = []
all_measures_by_sector = []
all_measures_total ={}
for sector in sp500_by_sector:
sector_measures = {}
sector_counter = 0
for k in sp500_by_sector[sector]:
if k in good_keys:
print(k)
algo = back_test_single(Single_Algos.algos["moving average with support price"],test_start,test_end,ticker=k)
temp_measure = algo.portfolio.get_measures()
sector_measures={ key:temp_measure[key]+sector_measures.get(key,0) for key in temp_measure}
all_measures_total={ key:temp_measure[key]+all_measures_total.get(key,0) for key in temp_measure}
temp_measure['ticker'] = k
all_measures.append(temp_measure)
sector_counter += 1
sector_measures = {key:sector_measures[key]/sector_counter for key in sector_measures}
sector_measures['ticker'] = sector
all_measures_by_sector.append(sector_measures)
all_measures_total = {key:all_measures_total[key]/count for key in all_measures_total}
all_measures_total['ticker'] = "Avg_SP500"
list_for_csv = [all_measures_total]+all_measures_by_sector+all_measures
dict_array_to_csv(list_for_csv,"Moving_Average_With_Support_Price_Result_1.csv",fields=['ticker','return','volatility','draw_down','max_draw_down','sharpe'])
# Test 4 ---------------------------------
#run moving average with support price and volatility signal on all SP500 underlyers
cache = Cache()
#Cache.quandl_key = "Put your quandl key here if you want better download speed with Quandl"
sp500 = get_snp500()
sp500_by_sector = get_snp500_by_sector()
# print(sp500_by_sector['real_estate'])
Pricing_Database.lazy_update_data = True # to use downloaded data in the past week.
count = 0
good_keys = []
for k in sp500:
try:
if k == "JEC":
raise Exception('Unadjusted ticker')
cache.get_ticker_data(k)
count += 1
good_keys.append(k)
except:
print( "skipped {}".format(k) )
pass
filtered_keys = [] # In this strategy first we restrict our algo only on less-volatile stocks - note that we apply the vol filter here based on history up to the state date so there is no look forward bias
for k in good_keys:
if volatility(k,test_start - 250, test_start) < 0.25:
filtered_keys.append(k)
all_measures = []
all_measures_by_sector = []
all_measures_total ={}
for sector in sp500_by_sector:
sector_measures = {}
sector_counter = 0
for k in sp500_by_sector[sector]:
if k in filtered_keys:
print(k)
algo = back_test_single(Single_Algos.algos["moving average support volatility"],test_start,test_end,ticker=k)
temp_measure = algo.portfolio.get_measures()
sector_measures={ key:temp_measure[key]+sector_measures.get(key,0) for key in temp_measure}
all_measures_total={ key:temp_measure[key]+all_measures_total.get(key,0) for key in temp_measure}
temp_measure['ticker'] = k
all_measures.append(temp_measure)
sector_counter += 1
sector_measures = {key:sector_measures[key]/sector_counter for key in sector_measures}
sector_measures['ticker'] = sector
all_measures_by_sector.append(sector_measures)
all_measures_total = {key:all_measures_total[key]/count for key in all_measures_total}
all_measures_total['ticker'] = "Avg_SP500"
list_for_csv = [all_measures_total]+all_measures_by_sector+all_measures
dict_array_to_csv(list_for_csv,"Moving_Average_Support_Volatility_Result.csv",fields=['ticker','return','volatility','draw_down','max_draw_down','sharpe'])
| 40.821012
| 206
| 0.67801
| 1,457
| 10,491
| 4.571723
| 0.098833
| 0.115598
| 0.096082
| 0.042786
| 0.903618
| 0.892959
| 0.883351
| 0.883351
| 0.883351
| 0.876445
| 0
| 0.022922
| 0.209894
| 10,491
| 256
| 207
| 40.980469
| 0.780673
| 0.29368
| 0
| 0.854651
| 0
| 0
| 0.094156
| 0.019481
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.023256
| 0.040698
| 0
| 0.040698
| 0.046512
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
002756dad3c9229bd6017b5e42a92c9db74dcb8b
| 588
|
py
|
Python
|
bestfitting/src/config/en_config.py
|
guitarmind/HPA-competition-solutions
|
547d53aaca148fdb5f4585526ad7364dfa47967d
|
[
"MIT"
] | null | null | null |
bestfitting/src/config/en_config.py
|
guitarmind/HPA-competition-solutions
|
547d53aaca148fdb5f4585526ad7364dfa47967d
|
[
"MIT"
] | null | null | null |
bestfitting/src/config/en_config.py
|
guitarmind/HPA-competition-solutions
|
547d53aaca148fdb5f4585526ad7364dfa47967d
|
[
"MIT"
] | null | null | null |
external_crop512_focal_slov_hardlog_class_densenet121_dropout_i768_aug2_5folds = [
{'model_name': 'external_crop512_focal_slov_hardlog_class_densenet121_dropout_i768_aug2_5folds',
'fold': fold, 'epoch_name': 'final', 'augment': 'whole_maximum'} for fold in range(5)
]
external_crop1024_focal_slov_hardlog_clean_class_densenet121_large_dropout_i1536_aug2_5folds_f012 = [
{'model_name': 'external_crop1024_focal_slov_hardlog_clean_class_densenet121_large_dropout_i1536_aug2_5folds',
'fold': fold, 'epoch_name': 'final', 'augment': 'whole_maximum'} for fold in range(3)
]
| 58.8
| 114
| 0.818027
| 79
| 588
| 5.443038
| 0.367089
| 0.083721
| 0.148837
| 0.111628
| 0.944186
| 0.944186
| 0.944186
| 0.944186
| 0.944186
| 0.944186
| 0
| 0.098696
| 0.086735
| 588
| 9
| 115
| 65.333333
| 0.702048
| 0
| 0
| 0
| 0
| 0
| 0.455782
| 0.289116
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ccb78517149e2edf7a9d143603ed42930854484d
| 9,938
|
py
|
Python
|
caipirinha/schema.py
|
eubr-bigsea/caipirinha
|
4856fedc77aaa1f7b7ccd145c612fdc332f9b25e
|
[
"Apache-2.0"
] | null | null | null |
caipirinha/schema.py
|
eubr-bigsea/caipirinha
|
4856fedc77aaa1f7b7ccd145c612fdc332f9b25e
|
[
"Apache-2.0"
] | 10
|
2017-02-23T17:04:48.000Z
|
2021-07-09T18:28:53.000Z
|
caipirinha/schema.py
|
eubr-bigsea/caipirinha
|
4856fedc77aaa1f7b7ccd145c612fdc332f9b25e
|
[
"Apache-2.0"
] | 1
|
2020-11-13T22:02:22.000Z
|
2020-11-13T22:02:22.000Z
|
# -*- coding: utf-8 -*-
import datetime
import json
from copy import deepcopy
from marshmallow import Schema, fields, post_load, post_dump, EXCLUDE, INCLUDE
from marshmallow.validate import OneOf
from flask_babel import gettext
from caipirinha.models import *
def partial_schema_factory(schema_cls):
    """Instantiate *schema_cls* with ``partial=True`` and propagate the
    partial flag into every nested field.

    Each nested field is deep-copied before mutation so the class-level
    field objects shared by other schema instances stay untouched.
    """
    partial_schema = schema_cls(partial=True)
    nested_names = [name for name, fld in partial_schema.fields.items()
                    if isinstance(fld, fields.Nested)]
    for name in nested_names:
        replacement = deepcopy(partial_schema.fields[name])
        replacement.schema.partial = True
        partial_schema.fields[name] = replacement
    return partial_schema
def translate_validation(validation_errors):
    """Recursively translate validation error messages via gettext, in place.

    Nested-field errors arrive as dicts (recursed into); leaf errors arrive
    as lists of message strings. Returns the same (mutated) mapping.
    """
    for field_name in list(validation_errors):
        messages = validation_errors[field_name]
        if isinstance(messages, dict):
            validation_errors[field_name] = translate_validation(messages)
        else:
            validation_errors[field_name] = [gettext(m) for m in messages]
    return validation_errors
def load_json(str_value):
    """Parse *str_value* as JSON, returning ``None`` when parsing fails.

    Catches only the exceptions ``json.loads`` actually raises on bad input
    (ValueError covers JSONDecodeError; TypeError covers non-string input)
    instead of the original bare ``BaseException``, which also swallowed
    KeyboardInterrupt and SystemExit.
    """
    try:
        return json.loads(str_value)
    except (ValueError, TypeError):
        return None
# region Protected
class UserCreateRequestSchema(Schema):
    """Schema for the user payload embedded in create requests: a required
    numeric id plus login and display name strings."""
    id = fields.Integer(required=True)
    login = fields.String(required=True)
    name = fields.String(required=True)
# endregion
class BaseSchema(Schema):
    """Common base schema: drops None-valued keys from serialized output."""
    @post_dump
    def remove_skip_values(self, data, **kwargs):
        # Remove only keys whose value is literally None; other falsy
        # values (empty lists, '', 0, False) must survive serialization.
        cleaned = {}
        for key, value in data.items():
            if value is not None:
                cleaned[key] = value
        return cleaned
class DashboardListResponseSchema(BaseSchema):
    """Serialization schema for Dashboard entries in list responses.

    Dump side: flattened ``user_*`` / ``workflow_*`` model columns are
    exposed as nested ``user`` and ``workflow`` dicts, and the JSON-string
    ``configuration`` column is decoded. Load side: ``make_object`` turns
    validated data back into a ``Dashboard`` model instance.
    """
    id = fields.Integer(required=True)
    title = fields.String(required=True)
    # created/updated fall back to the current UTC time when absent.
    created = fields.DateTime(
        required=False,
        allow_none=True,
        missing=datetime.datetime.utcnow,
        default=datetime.datetime.utcnow)
    updated = fields.DateTime(
        required=False,
        allow_none=True,
        missing=datetime.datetime.utcnow,
        default=datetime.datetime.utcnow)
    version = fields.Integer(required=True)
    task_id = fields.String(required=True)
    job_id = fields.Integer(required=True)
    # configuration is stored as a JSON string; serialize it decoded (or None).
    configuration = fields.Function(lambda x: load_json(x.configuration))
    is_public = fields.Boolean(
        required=False,
        allow_none=True,
        missing=False,
        default=False)
    hash = fields.String(required=False, allow_none=True)
    # Flat user_id/user_name/user_login columns serialized as one dict.
    user = fields.Function(
        lambda x: {
            "id": x.user_id,
            "name": x.user_name,
            "login": x.user_login})
    # Flat workflow_id/workflow_name columns serialized as one dict.
    workflow = fields.Function(
        lambda x: {
            "id": x.workflow_id,
            "name": x.workflow_name})
    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """ Deserialize data into an instance of Dashboard"""
        return Dashboard(**data)
    class Meta:
        ordered = True
        unknown = EXCLUDE
class DashboardCreateRequestSchema(BaseSchema):
    """Validation schema for dashboard create-request payloads.

    Accepts flat ``user_*`` / ``workflow_*`` fields plus optional nested
    ``visualizations`` and ``user`` payloads; ``make_object`` builds a
    ``Dashboard`` model from the validated data.
    """
    title = fields.String(required=True)
    user_id = fields.Integer(required=True)
    user_login = fields.String(required=True)
    user_name = fields.String(required=True)
    workflow_id = fields.Integer(required=True)
    workflow_name = fields.String(required=False, allow_none=True)
    task_id = fields.String(required=True)
    job_id = fields.Integer(required=True)
    # Kept as a raw string here (decoded only on the response schemas).
    configuration = fields.String(required=False, allow_none=True)
    is_public = fields.Boolean(
        required=False,
        allow_none=True,
        missing=False,
        default=False)
    hash = fields.String(required=False, allow_none=True)
    visualizations = fields.Nested(
        'caipirinha.schema.VisualizationCreateRequestSchema',
        allow_none=True,
        many=True)
    user = fields.Nested(
        'caipirinha.schema.UserCreateRequestSchema',
        allow_none=True)
    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """ Deserialize data into an instance of Dashboard"""
        return Dashboard(**data)
    class Meta:
        ordered = True
        unknown = EXCLUDE
class DashboardItemResponseSchema(BaseSchema):
    """Serialization schema for a single Dashboard item response.

    Same shape as the list schema plus the nested ``visualizations``
    collection; ``make_object`` rebuilds a ``Dashboard`` model on load.
    """
    id = fields.Integer(required=True)
    title = fields.String(required=True)
    # created/updated fall back to the current UTC time when absent.
    created = fields.DateTime(
        required=False,
        allow_none=True,
        missing=datetime.datetime.utcnow,
        default=datetime.datetime.utcnow)
    updated = fields.DateTime(
        required=False,
        allow_none=True,
        missing=datetime.datetime.utcnow,
        default=datetime.datetime.utcnow)
    version = fields.Integer(required=True)
    task_id = fields.String(required=True)
    job_id = fields.Integer(required=True)
    # configuration is stored as a JSON string; serialize it decoded (or None).
    configuration = fields.Function(lambda x: load_json(x.configuration))
    is_public = fields.Boolean(
        required=False,
        allow_none=True,
        missing=False,
        default=False)
    hash = fields.String(required=False, allow_none=True)
    visualizations = fields.Nested(
        'caipirinha.schema.VisualizationItemResponseSchema',
        allow_none=True,
        many=True)
    # Flat user_* columns serialized as one dict.
    user = fields.Function(
        lambda x: {
            "id": x.user_id,
            "name": x.user_name,
            "login": x.user_login})
    # Flat workflow_* columns serialized as one dict.
    workflow = fields.Function(
        lambda x: {
            "id": x.workflow_id,
            "name": x.workflow_name})
    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """ Deserialize data into an instance of Dashboard"""
        return Dashboard(**data)
    class Meta:
        ordered = True
        unknown = EXCLUDE
class VisualizationCreateRequestSchema(BaseSchema):
    """Validation schema for visualization create-request payloads.

    Requires a nested visualization ``type``; ``make_object`` builds a
    ``Visualization`` model from the validated data.
    """
    task_id = fields.String(required=True)
    title = fields.String(required=True)
    workflow_id = fields.Integer(required=False, allow_none=True)
    job_id = fields.Integer(required=False, allow_none=True)
    # Layout hint; defaults to a full-width (12-column) widget.
    suggested_width = fields.Integer(
        required=False,
        allow_none=True,
        missing=12,
        default=12)
    data = fields.String(required=False, allow_none=True)
    type = fields.Nested(
        'caipirinha.schema.VisualizationTypeCreateRequestSchema',
        required=True)
    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """ Deserialize data into an instance of Visualization"""
        return Visualization(**data)
    class Meta:
        ordered = True
        unknown = EXCLUDE
class VisualizationListResponseSchema(BaseSchema):
    """Serialization schema for Visualization entries in list responses."""
    id = fields.Integer(required=True)
    task_id = fields.String(required=True)
    title = fields.String(required=True)
    workflow_id = fields.Integer(required=False, allow_none=True)
    job_id = fields.Integer(required=False, allow_none=True)
    # Layout hint; defaults to a full-width (12-column) widget.
    suggested_width = fields.Integer(
        required=False,
        allow_none=True,
        missing=12,
        default=12)
    data = fields.String(required=False, allow_none=True)
    type = fields.Nested(
        'caipirinha.schema.VisualizationTypeListResponseSchema',
        required=True)
    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """ Deserialize data into an instance of Visualization"""
        return Visualization(**data)
    class Meta:
        ordered = True
        unknown = EXCLUDE
class VisualizationItemResponseSchema(BaseSchema):
    """Serialization schema for a single Visualization item response."""
    id = fields.Integer(required=True)
    task_id = fields.String(required=True)
    title = fields.String(required=True)
    workflow_id = fields.Integer(required=False, allow_none=True)
    job_id = fields.Integer(required=False, allow_none=True)
    # Layout hint; defaults to a full-width (12-column) widget.
    suggested_width = fields.Integer(
        required=False,
        allow_none=True,
        missing=12,
        default=12)
    data = fields.String(required=False, allow_none=True)
    type = fields.Nested(
        'caipirinha.schema.VisualizationTypeItemResponseSchema',
        required=True)
    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """ Deserialize data into an instance of Visualization"""
        return Visualization(**data)
    class Meta:
        ordered = True
        unknown = EXCLUDE
class VisualizationTypeCreateRequestSchema(BaseSchema):
    """Validation schema for the visualization-type reference in create
    requests: only the id (and optional icon) is accepted."""
    id = fields.Integer(required=True)
    icon = fields.String(required=False, allow_none=True)
    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """ Deserialize data into an instance of VisualizationType"""
        return VisualizationType(**data)
    class Meta:
        ordered = True
        unknown = EXCLUDE
class VisualizationTypeItemResponseSchema(BaseSchema):
    """Serialization schema for a single VisualizationType item response."""
    id = fields.Integer(required=True)
    name = fields.String(required=True)
    help = fields.String(required=True)
    icon = fields.String(required=False, allow_none=True)
    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """ Deserialize data into an instance of VisualizationType"""
        return VisualizationType(**data)
    class Meta:
        ordered = True
        unknown = EXCLUDE
class VisualizationTypeListResponseSchema(BaseSchema):
    """Serialization schema for VisualizationType entries in list responses."""
    id = fields.Integer(required=True)
    name = fields.String(required=True)
    help = fields.String(required=True)
    icon = fields.String(required=False, allow_none=True)
    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """ Deserialize data into an instance of VisualizationType"""
        return VisualizationType(**data)
    class Meta:
        ordered = True
        unknown = EXCLUDE
| 31.153605
| 78
| 0.669149
| 1,060
| 9,938
| 6.168868
| 0.123585
| 0.069735
| 0.094816
| 0.09084
| 0.772901
| 0.754855
| 0.749809
| 0.726258
| 0.726258
| 0.710506
| 0
| 0.001708
| 0.234252
| 9,938
| 318
| 79
| 31.251572
| 0.857556
| 0.110787
| 0
| 0.754167
| 0
| 0
| 0.038325
| 0.034423
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054167
| false
| 0
| 0.029167
| 0.004167
| 0.533333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
9d9d3c37b073921e5bd0fd49bfac20e0963a9d5a
| 5,893
|
py
|
Python
|
tests/strategies/test_transformation.py
|
EMMC-ASBL/otelib
|
a66001f3a8b94fc7a113d25f03ff04ba1e2fbf51
|
[
"MIT"
] | 1
|
2022-01-24T15:18:14.000Z
|
2022-01-24T15:18:14.000Z
|
tests/strategies/test_transformation.py
|
EMMC-ASBL/otelib
|
a66001f3a8b94fc7a113d25f03ff04ba1e2fbf51
|
[
"MIT"
] | 34
|
2022-01-28T16:22:46.000Z
|
2022-03-30T17:07:36.000Z
|
tests/strategies/test_transformation.py
|
EMMC-ASBL/otelib
|
a66001f3a8b94fc7a113d25f03ff04ba1e2fbf51
|
[
"MIT"
] | null | null | null |
"""Tests for `otelib.strategies.transformation`."""
from typing import TYPE_CHECKING
import pytest
if TYPE_CHECKING:
from typing import Callable, Union
from tests.conftest import OTEResponse, ResourceType
def test_create(
    mock_ote_response: "OTEResponse",
    ids: "Callable[[Union[ResourceType, str]], str]",
    server_url: str,
) -> None:
    """`Transformation.create()` populates the strategy's id on success."""
    from otelib.strategies.transformation import Transformation

    # Mock the POST that registers a new transformation strategy.
    mock_ote_response(
        method="post",
        endpoint="/transformation",
        return_json={"transformation_id": ids("transformation")},
    )

    strategy = Transformation(server_url)
    assert strategy.id is None

    create_kwargs = {
        "transformationType": "celery/remote",
        "configuration": {"task_name": "test-task", "args": []},
    }
    strategy.create(**create_kwargs)
    assert strategy.id
def test_create_fails(
    mock_ote_response: "OTEResponse",
    server_url: str,
) -> None:
    """Check `Transformation.create()` raises `ApiError` upon request failure."""
    from otelib.exceptions import ApiError
    from otelib.strategies.transformation import Transformation

    # Make the registration endpoint answer with a server error.
    mock_ote_response(
        method="post",
        endpoint="/transformation",
        status_code=500,
        return_content=b"Internal Server Error",
    )

    strategy = Transformation(server_url)
    assert strategy.id is None

    with pytest.raises(ApiError, match="APIError"):
        # `session_id` has a wrong type, the request should fail.
        strategy.create(
            transformationType="celery/remote",
            configuration={"task_name": "test-task", "args": []},
            session_id=123,
        )

    # The id must remain unset after a failed create.
    assert strategy.id is None
def test_fetch(
    mock_ote_response: "OTEResponse",
    ids: "Callable[[Union[ResourceType, str]], str]",
    server_url: str,
    testdata: "Callable[[Union[ResourceType, str]], dict]",
) -> None:
    """Test `Transformation.fetch()` returns the mocked transformation data."""
    import json
    from otelib.strategies.transformation import Transformation
    # Mock both the create POST and the fetch GET.
    mock_ote_response(
        method="post",
        endpoint="/transformation",
        return_json={"transformation_id": ids("transformation")},
    )
    mock_ote_response(
        method="get",
        endpoint=f"/transformation/{ids('transformation')}",
        return_json=testdata("transformation"),
    )
    transformation = Transformation(server_url)
    # We must first create the resource - getting a resource ID
    transformation.create(
        transformationType="celery/remote",
        configuration={"task_name": "test-task", "args": []},
    )
    content = transformation.fetch(session_id=None)
    # fetch() returns raw bytes/str; compare after JSON-decoding.
    assert json.loads(content) == testdata("transformation")
def test_fetch_fails(
    mock_ote_response: "OTEResponse",
    ids: "Callable[[Union[ResourceType, str]], str]",
    server_url: str,
) -> None:
    """Check `Transformation.fetch()` raises `ApiError` upon request failure."""
    from otelib.exceptions import ApiError
    from otelib.strategies.transformation import Transformation
    # Creation succeeds; only the subsequent GET fails with a 500.
    mock_ote_response(
        method="post",
        endpoint="/transformation",
        return_json={"transformation_id": ids("transformation")},
    )
    mock_ote_response(
        method="get",
        endpoint=f"/transformation/{ids('transformation')}",
        status_code=500,
        return_content=b"Internal Server Error",
    )
    transformation = Transformation(server_url)
    # We must first create the resource - getting a resource ID
    transformation.create(
        transformationType="celery/remote",
        configuration={"task_name": "test-task", "args": []},
    )
    with pytest.raises(ApiError, match="APIError"):
        # `session_id` has a wrong type, the request should fail.
        transformation.fetch(session_id=123)
def test_initialize(
    mock_ote_response: "OTEResponse",
    ids: "Callable[[Union[ResourceType, str]], str]",
    server_url: str,
) -> None:
    """Test `Transformation.initialize()` returns the mocked (empty) JSON."""
    # NOTE: the original docstring said `fetch()` - copy-paste slip; this
    # test exercises initialize().
    import json
    from otelib.strategies.transformation import Transformation
    mock_ote_response(
        method="post",
        endpoint="/transformation",
        return_json={"transformation_id": ids("transformation")},
    )
    mock_ote_response(
        method="post",
        endpoint=f"/transformation/{ids('transformation')}/initialize",
        return_json={},
    )
    transformation = Transformation(server_url)
    # We must first create the resource - getting a resource ID
    transformation.create(
        transformationType="celery/remote",
        configuration={"task_name": "test-task", "args": []},
    )
    content = transformation.initialize(session_id=None)
    assert json.loads(content) == {}
def test_initialize_fails(
    mock_ote_response: "OTEResponse",
    ids: "Callable[[Union[ResourceType, str]], str]",
    server_url: str,
) -> None:
    """Check `Transformation.initialize()` raises `ApiError` upon request failure."""
    # NOTE: the original docstring said `fetch()` - copy-paste slip; this
    # test exercises initialize().
    from otelib.exceptions import ApiError
    from otelib.strategies.transformation import Transformation
    # Creation succeeds; only the initialize POST fails with a 500.
    mock_ote_response(
        method="post",
        endpoint="/transformation",
        return_json={"transformation_id": ids("transformation")},
    )
    mock_ote_response(
        method="post",
        endpoint=f"/transformation/{ids('transformation')}/initialize",
        status_code=500,
        return_content=b"Internal Server Error",
    )
    transformation = Transformation(server_url)
    # We must first create the resource - getting a resource ID
    transformation.create(
        transformationType="celery/remote",
        configuration={"task_name": "test-task", "args": []},
    )
    with pytest.raises(ApiError, match="APIError"):
        # `session_id` has a wrong type, the request should fail.
        transformation.initialize(session_id=123)
| 28.468599
| 81
| 0.666893
| 597
| 5,893
| 6.437186
| 0.134003
| 0.029144
| 0.062451
| 0.075462
| 0.885246
| 0.86625
| 0.857143
| 0.838928
| 0.838928
| 0.814728
| 0
| 0.003873
| 0.211268
| 5,893
| 206
| 82
| 28.606796
| 0.822935
| 0.127948
| 0
| 0.734266
| 0
| 0
| 0.215617
| 0.06906
| 0
| 0
| 0
| 0
| 0.041958
| 1
| 0.041958
| false
| 0
| 0.104895
| 0
| 0.146853
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d184c844ff8552682a82cb1beabfa2a43bfbb4b6
| 94
|
py
|
Python
|
src/OptionFlags.py
|
riferg206/aws-programmatic-tester
|
6f04ca9fb983caf1fb119c57775560a848d160df
|
[
"Apache-2.0"
] | null | null | null |
src/OptionFlags.py
|
riferg206/aws-programmatic-tester
|
6f04ca9fb983caf1fb119c57775560a848d160df
|
[
"Apache-2.0"
] | null | null | null |
src/OptionFlags.py
|
riferg206/aws-programmatic-tester
|
6f04ca9fb983caf1fb119c57775560a848d160df
|
[
"Apache-2.0"
] | null | null | null |
import base64
def decode_b64(encoded_data):
    """Return the raw bytes represented by *encoded_data* (base64 str/bytes)."""
    decoded = base64.b64decode(encoded_data)
    return decoded
| 11.75
| 41
| 0.744681
| 12
| 94
| 5.583333
| 0.75
| 0.328358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 0.191489
| 94
| 7
| 42
| 13.428571
| 0.776316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
d18d4909e2cd23bedf5ab5443fbf5644fc867dac
| 107
|
py
|
Python
|
horch/models/cifar/iresnet/__init__.py
|
sbl1996/pytorch-hrvvi-ext
|
f19abcbedd844a700b2e2596dd817ea80cbb6287
|
[
"MIT"
] | 17
|
2019-05-14T10:47:25.000Z
|
2021-06-09T05:39:47.000Z
|
horch/models/cifar/iresnet/__init__.py
|
sbl1996/pytorch-hrvvi-ext
|
f19abcbedd844a700b2e2596dd817ea80cbb6287
|
[
"MIT"
] | null | null | null |
horch/models/cifar/iresnet/__init__.py
|
sbl1996/pytorch-hrvvi-ext
|
f19abcbedd844a700b2e2596dd817ea80cbb6287
|
[
"MIT"
] | 5
|
2019-08-08T07:04:38.000Z
|
2020-07-04T08:58:32.000Z
|
from horch.models.cifar.iresnet.resnet import ResNet
from horch.models.cifar.iresnet.resnext import ResNeXt
| 53.5
| 54
| 0.859813
| 16
| 107
| 5.75
| 0.5
| 0.195652
| 0.326087
| 0.434783
| 0.586957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065421
| 107
| 2
| 54
| 53.5
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d1c65f8cbf8498e75d29e62821792a4961484341
| 33,614
|
py
|
Python
|
examples/parametered_grid.py
|
abadithela/NFM2021_Static_Test_Synthesis
|
2e7ad9c08b6b79706ea5124ee7fa7d3b224cf64a
|
[
"MIT"
] | null | null | null |
examples/parametered_grid.py
|
abadithela/NFM2021_Static_Test_Synthesis
|
2e7ad9c08b6b79706ea5124ee7fa7d3b224cf64a
|
[
"MIT"
] | null | null | null |
examples/parametered_grid.py
|
abadithela/NFM2021_Static_Test_Synthesis
|
2e7ad9c08b6b79706ea5124ee7fa7d3b224cf64a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 22 20:01:37 2020
@author: apurvabadithelSETT"""
import networkx as nx
import time
import pickle as pkl
import random
import os
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.pyplot import cm
from networkx.algorithms.flow import shortest_augmenting_path, edmonds_karp
import sys
sys.path.append('../')
from src.grid_functions import construct_grid, plot_static_map, base_grid, plot_final_map, plot_augment_paths
from src.simulation_helpers import run_iterations, run_iterations_SAPs, run_iterations_random_graph
from time import gmtime, strftime
plt.rcParams['mathtext.fontset'] = 'custom'
plt.rcParams['mathtext.rm'] = 'Bitstream Vera Sans'
plt.rcParams['mathtext.it'] = 'Bitstream Vera Sans:italic'
plt.rcParams['mathtext.bf'] = 'Bitstream Vera Sans:bold'
#===============================================================================================#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Plot Figures ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
#===============================================================================================#
# Plotting Figures:
def plot_parametrized_figure(KEY, t, P, no_diag_data, diag_data=None):
    """Plot mean runtime (log-scaled y, std-dev error bars) vs. grid size.

    Parameters
    ----------
    KEY : str
        Basename of the saved figure (``KEY + ".png"``).
    t : list
        Grid sizes; each entry becomes one x-tick labelled ``str(tuple(ti))``.
    P : list
        Per-grid-size lists of proposition counts; ``P[0]`` (largest)
        determines how many |P| curves are drawn.
    no_diag_data : sequence
        ``[RUNTIME, RUNTIME_STD, TIME_OUT, ITER, ILP_ITER]`` for grids
        without diagonal transitions.
    diag_data : sequence, optional
        Same layout for grids with diagonal transitions; plotted as extra
        curves when given.

    Returns
    -------
    (fig, ax)
        The matplotlib figure and axes (also saved to disk at dpi=300).
    """
    fig, ax = plt.subplots()
    RUNTIME = no_diag_data[0]
    RUNTIME_STD = no_diag_data[1]
    TIME_OUT = no_diag_data[2]
    ITER = no_diag_data[3]
    ILP_ITER = no_diag_data[4]
    Pk = P[0] # largest Pk
    color=iter(cm.rainbow(np.linspace(0,1,len(Pk)))) # Colors (immediately overridden below)
    color = ['b', 'g']
    max_y = 0
    max_yerr = 0
    xlabels = [str(tuple(ti)) for ti in t]
    # Small horizontal offsets so curves for different |P| don't overlap at a tick.
    transform = [-0.05, 0, 0.05]
    for jj in range(len(Pk)):
        y = []
        x = []
        xticks = []
        space = transform[jj]
        yerr= []
        c = color[jj]
        for ii in range(len(t)):
            R_ii = RUNTIME[ii]
            Rt = RUNTIME_STD[ii]
            Tout_ii = TIME_OUT[ii]
            N_ii = ITER[ii]
            N_ilp_ii = ILP_ITER[ii]
            Pii = P[ii]
            assert(len(R_ii) == len(Tout_ii))
            if jj < len(R_ii):
                x.append(ii+1 + space)
                xticks.append(ii+1)
                y.append(R_ii[jj])
                yerr.append(Rt[jj])
                if max(y) > max_y:
                    max_y = max(y)
                if max(yerr)> max_yerr:
                    max_yerr = max(yerr)
        # BUG FIX: the original wrote np.array(yerr/y), but yerr and y are
        # Python lists and list/list raises TypeError.  Convert to arrays
        # first, then divide elementwise.  0.434 ~= log10(e): converts the
        # std-dev into error bars suited to the log-scaled y axis below.
        yerr = 0.434 * np.array(yerr) / np.array(y)
        plt.errorbar(x, y, yerr=yerr, c = c, alpha=.75, fmt=':', capsize=3, capthick=1, ls ='-', marker='o', linewidth=2, markersize=3, label=r'$|P| = $%s' % str(Pk[jj])) # total no. of props (including goal)
        # plt.errorbar(x,y,yerr=yerr)
    ax.set_xticks(xticks)
    ax.set_xticklabels(tuple(xlabels))
    # NOTE(review): 'nonposy' was removed in newer Matplotlib (now
    # 'nonpositive') -- confirm the pinned Matplotlib version.
    plt.yscale('log', nonposy='clip')
    # ax.set_yticks(np.arange(0, int(max_y+max_yerr)), (max_y+max_yerr)//1)
    if diag_data:
        RUNTIME = diag_data[0]
        RUNTIME_STD = diag_data[1]
        TIME_OUT = diag_data[2]
        ITER = diag_data[3]
        ILP_ITER = diag_data[4]
        Pk = P[0] # largest Pk
        color=iter(cm.rainbow(np.linspace(0,1,2*len(Pk)))) # Colors
        max_y = 0
        max_yerr=0
        for jj in range(len(Pk)):
            y = []
            x = []
            xticks = []
            space = transform[jj]
            yerr = []
            # NOTE(review): 'o--' + str(RGBA array) is not a valid color
            # spec for errorbar's c= argument; this branch (diag_data)
            # looks untested -- confirm before relying on it.
            c = 'o--'+str(next(color))
            for ii in range(len(t)):
                R_ii = RUNTIME[ii]
                Rt = RUNTIME_STD[ii]
                Tout_ii = TIME_OUT[ii]
                N_ii = ITER[ii]
                N_ilp_ii = ILP_ITER[ii]
                Pii = P[ii]
                assert(len(R_ii) == len(Tout_ii))
                if jj < len(R_ii):
                    x.append(ii+1 + space)
                    xticks.append(ii+1)
                    y.append(R_ii[jj])
                    yerr.append(Rt[jj])
                    if max(y) > max_y:
                        max_y = max(y)
                    if max(yerr)> max_yerr:
                        max_yerr = max(yerr)
            plt.errorbar(x, y, yerr = yerr, c =c, linewidth=2, markersize=2, label=r'diag: $|P| = $%s' % str(Pk[jj]+1))
            #ax.plot(x, y, yerr=yerr)
    plt.legend()
    ax.set(xlabel='Grid size (M,N)', ylabel='Runtime (s)')
    # fig.savefig(KEY+".csv")
    fig.savefig(KEY+".png", dpi=300)
    return fig, ax
# Run parametrized figure for square grids:
def plot_parametrized_figure_square_grids(KEY, t, P, no_diag_data, diag_data=None):
    """Plot mean runtime vs. square-grid size t (log-scaled y, error bars).

    Same data layout as plot_parametrized_figure:
    no_diag_data = [RUNTIME, RUNTIME_STD, TIME_OUT, ITER, ILP_ITER];
    diag_data (optional) adds curves for grids with diagonal transitions.
    Saves the figure as KEY + ".png" and returns (fig, ax).
    """
    fig, ax = plt.subplots()
    RUNTIME = no_diag_data[0]
    RUNTIME_STD = no_diag_data[1]
    TIME_OUT = no_diag_data[2]
    ITER = no_diag_data[3]
    ILP_ITER = no_diag_data[4]
    Pk = P[0] # largest Pk
    color=iter(cm.rainbow(np.linspace(0,1,len(Pk)))) # Colors (immediately overridden below)
    color = ['b', 'g', 'm']
    max_y = 0
    max_yerr = 0
    xlabels = t
    # Small horizontal offsets so curves for different |P| don't overlap at a tick.
    transform = [-0.05, 0, 0.05]
    for jj in range(len(Pk)):
        y = []
        x = []
        yerr=[]
        xticks = []
        space = transform[jj]
        c = color[jj]
        for ii in range(len(t)):
            R_ii = RUNTIME[ii]
            Tout_ii = TIME_OUT[ii]
            N_ii = ITER[ii]
            N_ilp_ii = ILP_ITER[ii]
            Pii = P[ii]
            Rt = RUNTIME_STD[ii]
            assert(len(R_ii) == len(Tout_ii))
            if jj < len(R_ii):
                x.append(ii+1 + space)
                xticks.append(ii+1)
                y.append(R_ii[jj])
                yerr.append(Rt[jj])
                if max(y) > max_y:
                    max_y = max(y)
                if max(yerr)> max_yerr:
                    max_yerr = max(yerr)
        plt.errorbar(x, y, yerr = yerr, c = c, alpha=.75, fmt=':', capsize=3, capthick=1, ls ='-', marker='o', linewidth=2, markersize=3, label=r'$|P| = $%s' % str(Pk[jj])) # total no. of props (including goal)
        # plt.errorbar(x,y,yerr=yerr)
    ax.set_xticks(xticks)
    ax.set_xticklabels(t)
    # NOTE(review): 'nonposy' was removed in newer Matplotlib ('nonpositive').
    plt.yscale('log', nonposy='clip')
    # ax.set_yticks(np.arange(0, int(max_y+max_yerr)), (max_y+max_yerr)//1)
    if diag_data:
        RUNTIME = diag_data[0]
        RUNTIME_STD = diag_data[1]
        TIME_OUT = diag_data[2]
        ITER = diag_data[3]
        ILP_ITER = diag_data[4]
        Pk = P[0] # largest Pk
        color=iter(cm.rainbow(np.linspace(0,1,2*len(Pk)))) # Colors
        for jj in range(len(Pk)):
            y = []
            x = []
            yerr= []
            xticks = []
            space = transform[jj]
            # NOTE(review): 'o--' + str(RGBA array) is not a valid color spec;
            # this diag branch looks untested -- confirm before relying on it.
            c = 'o--'+str(next(color))
            for ii in range(len(t)):
                R_ii = RUNTIME[ii]
                Rt = RUNTIME_STD[ii]
                Tout_ii = TIME_OUT[ii]
                N_ii = ITER[ii]
                N_ilp_ii = ILP_ITER[ii]
                Pii = P[ii]
                assert(len(R_ii) == len(Tout_ii))
                if jj < len(R_ii):
                    x.append(ii+1+space)
                    xticks.append(ii+1)
                    y.append(R_ii[jj])
                    yerr.append(Rt[jj])
            plt.errorbar(x, y, yerr = yerr, c = c, alpha=.75, fmt=':', capsize=3, capthick=1, ls ='-', marker='o', linewidth=2, markersize=3, label=r'$|P| = $%s' % str(Pk[jj]))
    plt.legend()
    ax.set(xlabel='Grid size (t)', ylabel='Runtime (s)')
    # fig.savefig(KEY+".csv")
    fig.savefig(KEY+".png")
    return fig, ax
# Plotting Figures:
def plot_parametrized_figure_rg(KEY, M, N, P, no_diag_data, diag_data=None):
    """Plot runtime vs. number of nodes for random-graph experiments.

    M: list of node counts (x values); N: edge-count label (string, used in
    the title and filename).  no_diag_data = [RUNTIME, TIME_OUT, ITER,
    ILP_ITER] (note: no std-dev entry here, unlike the grid variants).
    Saves the figure under Data/random_<KEY>_<N>_#.png ('#' is written
    literally, not timestamp-substituted) and returns (fig, ax).
    """
    fig, ax = plt.subplots()
    RUNTIME = no_diag_data[0]
    TIME_OUT = no_diag_data[1]
    ITER = no_diag_data[2]
    ILP_ITER = no_diag_data[3]
    Pk = P[0] # largest Pk
    color=iter(cm.rainbow(np.linspace(0,1,len(Pk)))) # Colors (immediately overridden below)
    color = iter(['b', 'k'])
    for jj in range(len(Pk)):
        y = []
        x = []
        c = next(color)
        for ii in range(len(M)):
            R_ii = RUNTIME[ii]
            Tout_ii = TIME_OUT[ii]
            N_ii = ITER[ii]
            N_ilp_ii = ILP_ITER[ii]
            Pii = P[ii]
            assert(len(R_ii) == len(Tout_ii))
            if jj < len(R_ii):
                x.append(M[ii])
                y.append(R_ii[jj])
        ax.plot(x, y, c = c, ls ='-', marker='o', linewidth=2, markersize=2, label=r'$|P| = $%s' % str(Pk[jj]+1))
    ax.set_title(N+" edges")
    ax.set_xticks(M)
    # y-ticks sized from the last curve's maximum (y from the final jj loop).
    ax.set_yticks(np.arange(0, int(max(y))+1, 0.5))
    if diag_data:
        RUNTIME = diag_data[0]
        TIME_OUT = diag_data[1]
        ITER = diag_data[2]
        ILP_ITER = diag_data[3]
        Pk = P[0] # largest Pk
        color=iter(cm.rainbow(np.linspace(0,1,2*len(Pk)))) # Colors
        for jj in range(len(Pk)):
            y = []
            x = []
            # NOTE(review): 'o--' + str(RGBA array) is not a valid format/color
            # string; this diag branch looks untested -- confirm before use.
            c = 'o--'+str(next(color))
            for ii in range(len(M)):
                R_ii = RUNTIME[ii]
                Tout_ii = TIME_OUT[ii]
                N_ii = ITER[ii]
                N_ilp_ii = ILP_ITER[ii]
                Pii = P[ii]
                assert(len(R_ii) == len(Tout_ii))
                if jj < len(R_ii):
                    x.append(M[ii])
                    y.append(R_ii[jj])
            ax.plot(x, y, c, linewidth=2, markersize=2, label=r'diag: $|P| = $%s' % str(Pk[jj]+1))
    plt.legend()
    ax.set(xlabel='Number of nodes (n)', ylabel='Runtime (s)')
    # fig.savefig(KEY+".csv")
    name = "Data/random_"+str(KEY)+"_"+str(N)+"_"
    figname = name + "#"+".png"
    fig.savefig(figname, dpi = 300)
    return fig, ax
# Function to save data:
def save_data(KEY, t, P, no_diag_data, diag_data=None):
    """Pickle simulation results to timestamped .dat files under Data/New/.

    t: grid sizes; P: proposition counts;
    no_diag_data = [time_avg, time_avg_std_dev, timed_out, total_iter, ILP];
    diag_data (optional): same layout for grids with diagonal transitions,
    written with a DIAG_ prefix under Data/.

    Fixes a resource leak in the original, which passed unclosed
    ``open(path, "wb")`` handles to ``pkl.dump`` for every file.
    """
    def _dump(obj, path):
        # Write one pickle and close the handle deterministically.
        with open(path, "wb") as fh:
            pkl.dump(obj, fh)

    current_directory = os.getcwd()
    name = "Data/New/#/param_grid_"+str(KEY)
    # '#' is substituted once with the current UTC timestamp.
    name = name.replace("#", strftime("%Y_%m_%d_%H_%M_%S", gmtime()))
    final_directory = os.path.join(current_directory, name)
    if not os.path.exists(final_directory):
        os.makedirs(final_directory)
    # =============== No diagonal data ============================== #
    _dump(t, name + "_rows" + ".dat")
    _dump(P, name + "_" + "P" + ".dat")
    _dump(no_diag_data[0], name + "_" + "time_avg" + ".dat")
    _dump(no_diag_data[1], name + "_" + "time_avg_std_dev" + ".dat")
    _dump(no_diag_data[2], name + "_" + "timed_out" + ".dat")
    _dump(no_diag_data[3], name + "_" + "total_iter" + ".dat")
    _dump(no_diag_data[4], name + "_" + "ILP" + ".dat")
    # =============== Diagonal data ============================== #
    if diag_data:
        name = "Data/#/DIAG_param_grid_"+str(KEY)
        name = name.replace("#", strftime("%Y_%m_%d_%H_%M_%S", gmtime()))
        _dump(t, name + "_" + "t" + ".dat")
        _dump(P, name + "_" + "P" + ".dat")
        _dump(diag_data[0], name + "_" + "time_avg" + ".dat")
        # NOTE(review): the next two dumps wrote no_diag_data[1]/[2] in the
        # original (not diag_data) -- preserved as-is, but this looks like a
        # copy-paste slip; confirm against the analysis scripts.
        _dump(no_diag_data[1], name + "_" + "time_avg_std_dev" + ".dat")
        _dump(no_diag_data[2], name + "_" + "timed_out" + ".dat")
        _dump(diag_data[3], name + "_" + "total_iter" + ".dat")
        _dump(diag_data[4], name + "_" + "ILP" + ".dat")
# Function to save data for random graphs:
def save_data_random_graph(KEY, M, N, P, no_diag_data, diag_data=None):
    """Pickle random-graph simulation results to timestamped .dat files.

    M: node counts; N: edge-count label;
    no_diag_data = [time_avg, timed_out, total_iter, ILP] (no std-dev slot
    in this variant); diag_data (optional): same layout, written without the
    "Data/" directory prefix (matching the original behavior).

    Fixes a resource leak in the original, which passed unclosed
    ``open(path, "wb")`` handles to ``pkl.dump`` for every file.
    """
    def _stamped(template):
        # Substitute '#' with the current UTC timestamp; one call per file,
        # matching the original's per-file strftime.
        return template.replace("#", strftime("%Y_%m_%d_%H_%M_%S", gmtime()))

    def _dump(obj, path):
        # Write one pickle and close the handle deterministically.
        with open(path, "wb") as fh:
            pkl.dump(obj, fh)

    name = "Data/param_grid_"+str(KEY)+"_"+str(N)
    # =============== No diagonal data ============================== #
    _dump(M, _stamped(name + "_" + "grids" + "_#" + ".dat"))
    _dump(P, _stamped(name + "_" + "P" + "_#" + ".dat"))
    _dump(no_diag_data[0], _stamped(name + "_" + "time_avg" + "_#" + ".dat"))
    _dump(no_diag_data[1], _stamped(name + "_" + "timed_out" + "_#" + ".dat"))
    _dump(no_diag_data[2], _stamped(name + "_" + "total_iter" + "_#" + ".dat"))
    _dump(no_diag_data[3], _stamped(name + "_" + "ILP" + "_#" + ".dat"))
    # =============== Diagonal data ============================== #
    if diag_data:
        # NOTE(review): no "Data/" prefix here in the original -- preserved.
        name = "param_grid_"+str(KEY)+"_"+str(N)
        _dump(M, _stamped(name + "_" + "M" + "_#" + ".dat"))
        _dump(P, _stamped(name + "_" + "P" + "_#" + ".dat"))
        _dump(diag_data[0], _stamped(name + "_" + "time_avg" + "_#" + ".dat"))
        _dump(diag_data[1], _stamped(name + "_" + "timed_out" + "_#" + ".dat"))
        _dump(diag_data[2], _stamped(name + "_" + "total_iter" + "_#" + ".dat"))
        _dump(diag_data[3], _stamped(name + "_" + "ILP" + "_#" + ".dat"))
# ==============================================================================================#
# Parametrized gridworld iterative solver:
#===============================================================================================#
# Running SAP_augmented_paths constraints:
# Running SAP_augmented_paths constraints:
def run_SAPs(diag):
    """Benchmark the SAP (shortest-augmenting-paths) constraint solver on
    square gridworlds of increasing size, save the per-size results, and
    plot the aggregated timing data.

    Parameters
    ----------
    diag : bool
        If True, also benchmark gridworlds that allow diagonal transitions;
        otherwise only the 4-connected case is run.

    Returns
    -------
    fig, ax
        Figure/axes produced by plot_parametrized_figure_square_grids.
    """
    Niter = 50              # No. of random examples averaged per configuration
    t = [4, 5, 6, 7, 8]     # Square grid side lengths
    lM = len(t)
    P = [[3, 4] for _ in range(lM)]  # Proposition counts to sweep per grid size
    # Aggregated results, one inner list per entry of t:
    time_arr_no_diag = [[] for _ in range(lM)]            # mean solve time
    timed_out_num_no_diag = [[] for _ in range(lM)]       # no. of timed-out runs
    time_avg_std_no_diag = [[] for _ in range(lM)]        # std-dev of solve time
    time_avg_std_diag = [[] for _ in range(lM)]
    time_arr_diag = [[] for _ in range(lM)]
    timed_out_num_diag = [[] for _ in range(lM)]
    total_iterations_no_diag = [[] for _ in range(lM)]    # total solver iterations
    total_iterations_diag = [[] for _ in range(lM)]
    total_iterations_ILP_no_diag = [[] for _ in range(lM)]  # total ILP iterations
    total_iterations_ILP_diag = [[] for _ in range(lM)]
    KEY = "SAP"  # Use only SAP constraints
    for ii in range(lM):
        iM = t[ii]
        iN = t[ii]
        nprops = P[ii]
        nP = len(nprops)  # No. of proposition counts for this grid size
        time_arr_iM_iN_no_diag = [0] * nP
        time_avg_std_iM_iN_no_diag = [0] * nP
        timed_out_iM_iN_no_diag = [0] * nP
        total_iteration_iM_iN_no_diag = [0] * nP
        total_ILP_iteration_iM_iN_no_diag = [0] * nP
        if diag:
            time_arr_iM_iN_diag = [0] * nP
            time_avg_std_iM_iN_diag = [0] * nP
            timed_out_iM_iN_diag = [0] * nP
            total_iteration_iM_iN_diag = [0] * nP
            total_ILP_iteration_iM_iN_diag = [0] * nP
        for inP in range(nP):
            n_inP = nprops[inP]
            print("Computing data for t = "+str(iM)+" and nprops = "+str(n_inP))
            # No diagonal transitions:
            time_avg, timed_out_avg, total_iter_avg, total_ILP_avg, time_avg_std = run_iterations_SAPs(iM, iN, n_inP, Niter, False, KEY)
            # BUGFIX: store at index inP (the original used inP-1, which put
            # the first result into the last slot and shifted the rest).
            time_avg_std_iM_iN_no_diag[inP] = time_avg_std
            time_arr_iM_iN_no_diag[inP] = time_avg
            timed_out_iM_iN_no_diag[inP] = timed_out_avg
            total_iteration_iM_iN_no_diag[inP] = total_iter_avg
            total_ILP_iteration_iM_iN_no_diag[inP] = total_ILP_avg
            if diag:
                # BUGFIX: pass the proposition count n_inP (the original passed
                # the loop index inP). Diagonal transitions enabled.
                time_avg, timed_out_avg, total_iter_avg, total_ILP_avg, time_avg_std = run_iterations_SAPs(iM, iN, n_inP, Niter, True, KEY)
                time_arr_iM_iN_diag[inP] = time_avg
                time_avg_std_iM_iN_diag[inP] = time_avg_std
                timed_out_iM_iN_diag[inP] = timed_out_avg
                total_iteration_iM_iN_diag[inP] = total_iter_avg
                # BUGFIX: store in the diagonal ILP list (the original wrote
                # into the no-diagonal ILP list, clobbering its results).
                total_ILP_iteration_iM_iN_diag[inP] = total_ILP_avg
        time_arr_no_diag[ii] = time_arr_iM_iN_no_diag.copy()
        time_avg_std_no_diag[ii] = time_avg_std_iM_iN_no_diag.copy()
        timed_out_num_no_diag[ii] = timed_out_iM_iN_no_diag.copy()
        total_iterations_no_diag[ii] = total_iteration_iM_iN_no_diag.copy()
        # BUGFIX: copy the ILP iteration counts (the original copied the plain
        # iteration counts into the ILP aggregate as well).
        total_iterations_ILP_no_diag[ii] = total_ILP_iteration_iM_iN_no_diag.copy()
        if diag:
            time_arr_diag[ii] = time_arr_iM_iN_diag.copy()
            time_avg_std_diag[ii] = time_avg_std_iM_iN_diag.copy()
            timed_out_num_diag[ii] = timed_out_iM_iN_diag.copy()
            total_iterations_diag[ii] = total_iteration_iM_iN_diag.copy()
            total_iterations_ILP_diag[ii] = total_ILP_iteration_iM_iN_diag.copy()
        # Persist this grid size's results as soon as they are produced:
        if diag:
            save_data(KEY, t[ii], P, [time_arr_iM_iN_no_diag, time_avg_std_iM_iN_no_diag, timed_out_iM_iN_no_diag, total_iteration_iM_iN_no_diag, total_ILP_iteration_iM_iN_no_diag], [time_arr_iM_iN_diag, time_avg_std_iM_iN_diag, timed_out_iM_iN_diag, total_iteration_iM_iN_diag, total_ILP_iteration_iM_iN_diag])
        else:
            save_data(KEY, t[ii], P, [time_arr_iM_iN_no_diag, time_avg_std_iM_iN_no_diag, timed_out_iM_iN_no_diag, total_iteration_iM_iN_no_diag, total_ILP_iteration_iM_iN_no_diag])
    if diag:
        fig, ax = plot_parametrized_figure_square_grids(KEY, t, P, [time_arr_no_diag, time_avg_std_no_diag, timed_out_num_no_diag, total_iterations_no_diag, total_iterations_ILP_no_diag], [time_arr_diag, time_avg_std_diag, timed_out_num_diag, total_iterations_diag, total_iterations_ILP_diag])
    else:
        fig, ax = plot_parametrized_figure_square_grids(KEY, t, P, [time_arr_no_diag, time_avg_std_no_diag, timed_out_num_no_diag, total_iterations_no_diag, total_iterations_ILP_no_diag])
    return fig, ax
# Running ALL_augmented_paths constraints:
# Running ALL_augmented_paths constraints:
def run_ALL(diag):
    """Benchmark the ALL-simple-path-constraints solver on small rectangular
    gridworlds, save the no-diagonal results, and plot them.

    Parameters
    ----------
    diag : bool
        If True, additionally run the solver with diagonal transitions.
        NOTE(review): as in the original code, the diagonal results are
        computed but not saved or plotted — confirm whether that is intended.

    Returns
    -------
    fig, ax
        Figure/axes produced by plot_parametrized_figure.
    """
    Niter = 50                            # No. of random examples per configuration
    t = [[3, 3], [3, 4], [3, 5], [4, 4]]  # Grid sizes as [rows, cols]
    lM = len(t)
    # Proposition counts per grid size (extra entries beyond len(t) are unused;
    # kept as in the original so downstream consumers of P see the same value).
    P = [[2, 3], [2, 3], [2, 3], [2, 3], [2, 3]]
    # Aggregated results, one inner list per entry of t:
    time_arr_no_diag = [[] for _ in range(lM)]            # mean solve time
    timed_out_num_no_diag = [[] for _ in range(lM)]       # no. of timed-out runs
    time_avg_std_no_diag = [[] for _ in range(lM)]        # std-dev of solve time
    total_iterations_no_diag = [[] for _ in range(lM)]    # total solver iterations
    total_iterations_ILP_no_diag = [[] for _ in range(lM)]  # total ILP iterations
    KEY = "ALL"  # Use all simple path constraints
    for ii in range(lM):
        iM = t[ii][0]
        iN = t[ii][1]
        nprops = P[ii]
        nP = len(nprops)  # No. of proposition counts for this grid size
        time_arr_iM_iN_no_diag = [0] * nP
        time_avg_std_iM_iN_no_diag = [0] * nP
        timed_out_iM_iN_no_diag = [0] * nP
        total_iteration_iM_iN_no_diag = [0] * nP
        total_ILP_iteration_iM_iN_no_diag = [0] * nP
        time_arr_iM_iN_diag = [0] * nP
        time_avg_std_iM_iN_diag = [0] * nP
        timed_out_iM_iN_diag = [0] * nP
        total_iteration_iM_iN_diag = [0] * nP
        total_ILP_iteration_iM_iN_diag = [0] * nP
        for inP in range(nP):
            n_inP = nprops[inP]
            print("Computing data for rows = "+str(iM)+" and cols = "+str(iN)+"and nprops = "+str(n_inP))
            # No diagonal transitions:
            time_avg, timed_out_avg, total_iter_avg, total_ILP_avg, time_avg_std = run_iterations(iM, iN, n_inP, Niter, False, KEY)
            # BUGFIX: store at index inP (the original used inP-1, which put
            # the first result into the last slot).
            time_avg_std_iM_iN_no_diag[inP] = time_avg_std
            time_arr_iM_iN_no_diag[inP] = time_avg
            timed_out_iM_iN_no_diag[inP] = timed_out_avg
            total_iteration_iM_iN_no_diag[inP] = total_iter_avg
            total_ILP_iteration_iM_iN_no_diag[inP] = total_ILP_avg
            if diag:
                # BUGFIX: pass the proposition count n_inP (the original passed
                # the loop index inP). Diagonal transitions enabled.
                time_avg, timed_out_avg, total_iter_avg, total_ILP_avg, time_avg_std = run_iterations(iM, iN, n_inP, Niter, True, KEY)
                time_arr_iM_iN_diag[inP] = time_avg
                time_avg_std_iM_iN_diag[inP] = time_avg_std
                timed_out_iM_iN_diag[inP] = timed_out_avg
                total_iteration_iM_iN_diag[inP] = total_iter_avg
                # BUGFIX: store in the diagonal ILP list (the original wrote
                # into the no-diagonal ILP list, clobbering its results).
                total_ILP_iteration_iM_iN_diag[inP] = total_ILP_avg
        time_arr_no_diag[ii] = time_arr_iM_iN_no_diag.copy()
        time_avg_std_no_diag[ii] = time_avg_std_iM_iN_no_diag.copy()
        timed_out_num_no_diag[ii] = timed_out_iM_iN_no_diag.copy()
        total_iterations_no_diag[ii] = total_iteration_iM_iN_no_diag.copy()
        # BUGFIX: copy the ILP iteration counts (the original copied the plain
        # iteration counts into the ILP aggregate as well).
        total_iterations_ILP_no_diag[ii] = total_ILP_iteration_iM_iN_no_diag.copy()
    # Only the no-diagonal aggregates are persisted and plotted (as originally):
    save_data(KEY, t, P, [time_arr_no_diag, time_avg_std_no_diag, timed_out_num_no_diag, total_iterations_no_diag, total_iterations_ILP_no_diag])
    fig, ax = plot_parametrized_figure(KEY, t, P, [time_arr_no_diag, time_avg_std_no_diag, timed_out_num_no_diag, total_iterations_no_diag, total_iterations_ILP_no_diag])
    return fig, ax
# ====================================================================================================
# Random Graphs
# ====================================================================================================
# Running SAP_augmented_paths constraints:
# Running SAP_augmented_paths constraints:
def run_rg_SAPs():
    """Benchmark the SAP constraint solver on square grids both with and
    without diagonal transitions, save the results, and plot them.

    Returns
    -------
    fig, ax
        Figure/axes produced by plot_parametrized_figure.
        BUGFIX: the original built empty FIG/AX lists at the end and returned
        those, discarding the real figure and axes.
    """
    Niter = 20            # No. of random examples per configuration
    t = [3, 4, 5, 6, 7]   # Square grid side lengths
    lM = len(t)
    P = [[2, 3, 4], [2, 3], [2, 3], [2, 3], [2, 3]]  # Prop counts per grid size
    # Aggregated results, one inner list per entry of t:
    time_arr_no_diag = [[] for _ in range(lM)]            # mean solve time
    timed_out_num_no_diag = [[] for _ in range(lM)]       # no. of timed-out runs
    time_arr_diag = [[] for _ in range(lM)]
    timed_out_num_diag = [[] for _ in range(lM)]
    total_iterations_no_diag = [[] for _ in range(lM)]    # total solver iterations
    total_iterations_diag = [[] for _ in range(lM)]
    total_iterations_ILP_no_diag = [[] for _ in range(lM)]  # total ILP iterations
    total_iterations_ILP_diag = [[] for _ in range(lM)]
    KEY = "SAP"  # Use only SAP constraints
    for ii in range(lM):
        iM = t[ii]
        iN = t[ii]
        nprops = P[ii]
        nP = len(nprops)  # No. of proposition counts for this grid size
        time_arr_iM_iN_no_diag = [0] * nP
        timed_out_iM_iN_no_diag = [0] * nP
        time_arr_iM_iN_diag = [0] * nP
        timed_out_iM_iN_diag = [0] * nP
        total_iteration_iM_iN_diag = [0] * nP
        total_iteration_iM_iN_no_diag = [0] * nP
        total_ILP_iteration_iM_iN_diag = [0] * nP
        total_ILP_iteration_iM_iN_no_diag = [0] * nP
        for inP in range(nP):
            n_inP = nprops[inP]
            print("Computing data for t = "+str(iM)+" and nprops = "+str(n_inP))
            # No diagonal transitions. NOTE(review): run_iterations returns a
            # 5-tuple in run_ALL; the original unpacked only 4 values here,
            # which would raise at runtime — the std term is now discarded
            # explicitly. Confirm against run_iterations' definition.
            time_avg, timed_out_avg, total_iter_avg, total_ILP_avg, _ = run_iterations(iM, iN, n_inP, Niter, False, KEY)
            # BUGFIX: store at index inP (the original used inP-1).
            time_arr_iM_iN_no_diag[inP] = time_avg
            timed_out_iM_iN_no_diag[inP] = timed_out_avg
            total_iteration_iM_iN_no_diag[inP] = total_iter_avg
            total_ILP_iteration_iM_iN_no_diag[inP] = total_ILP_avg
            # BUGFIX: pass the proposition count n_inP (the original passed
            # the loop index inP). Diagonal transitions enabled.
            time_avg, timed_out_avg, total_iter_avg, total_ILP_avg, _ = run_iterations(iM, iN, n_inP, Niter, True, KEY)
            time_arr_iM_iN_diag[inP] = time_avg
            timed_out_iM_iN_diag[inP] = timed_out_avg
            total_iteration_iM_iN_diag[inP] = total_iter_avg
            # BUGFIX: store in the diagonal ILP list (the original clobbered
            # the no-diagonal ILP result instead).
            total_ILP_iteration_iM_iN_diag[inP] = total_ILP_avg
        time_arr_no_diag[ii] = time_arr_iM_iN_no_diag.copy()
        timed_out_num_no_diag[ii] = timed_out_iM_iN_no_diag.copy()
        time_arr_diag[ii] = time_arr_iM_iN_diag.copy()
        timed_out_num_diag[ii] = timed_out_iM_iN_diag.copy()
        total_iterations_diag[ii] = total_iteration_iM_iN_diag.copy()
        total_iterations_no_diag[ii] = total_iteration_iM_iN_no_diag.copy()
        # BUGFIX: copy the ILP iteration counts (the original copied the plain
        # iteration counts into both ILP aggregates).
        total_iterations_ILP_diag[ii] = total_ILP_iteration_iM_iN_diag.copy()
        total_iterations_ILP_no_diag[ii] = total_ILP_iteration_iM_iN_no_diag.copy()
    save_data(KEY, t, P, [time_arr_no_diag, timed_out_num_no_diag, total_iterations_no_diag, total_iterations_ILP_no_diag], [time_arr_diag, timed_out_num_diag, total_iterations_diag, total_iterations_ILP_diag])
    fig, ax = plot_parametrized_figure(KEY, t, P, [time_arr_no_diag, timed_out_num_no_diag, total_iterations_no_diag, total_iterations_ILP_no_diag], [time_arr_diag, timed_out_num_diag, total_iterations_diag, total_iterations_ILP_diag])
    return fig, ax
# Running ALL_augmented_paths constraints:
# Running ALL_augmented_paths constraints:
def run_rg_ALL():
    """Benchmark the ALL-simple-path-constraints solver on random graphs.

    For each edge-count formula in N (2M and 3M edges for M nodes), runs the
    solver over graphs of M = 9..14 nodes, saves the results, and produces one
    figure per formula.

    Returns
    -------
    FIG, AX : list, list
        One (figure, axes) pair per edge-count formula, collected in order.
    """
    FIG = []
    AX = []
    Niter = 2                    # No. of random examples per configuration
    M = [9, 10, 11, 12, 13, 14]  # Number of nodes
    # Edge-count formulas, as functions of the node count:
    N = [lambda m: 2 * m, lambda m: 3 * m]
    lM = len(M)
    formula = ["2N", "3N"]       # Labels matching the entries of N
    for ilN in range(len(N)):
        N_expr = N[ilN]
        flN = formula[ilN]  # Formula label for this pass
        # Proposition counts per node count (extra entries beyond len(M) are
        # unused; kept as in the original).
        P = [[2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2], [2]]
        # Aggregated results, one inner list per entry of M:
        time_arr_no_diag = [[] for _ in range(lM)]            # mean solve time
        timed_out_num_no_diag = [[] for _ in range(lM)]       # timed-out runs
        total_iterations_no_diag = [[] for _ in range(lM)]    # solver iterations
        total_iterations_ILP_no_diag = [[] for _ in range(lM)]  # ILP iterations
        KEY = "ALL"  # Use all simple path constraints
        for ii in range(lM):
            iM = M[ii]
            iN = N_expr(M[ii])  # Edge count for this node count
            nprops = P[ii]
            nP = len(nprops)  # No. of proposition counts for this graph size
            time_arr_iM_iN_no_diag = [0] * nP
            timed_out_iM_iN_no_diag = [0] * nP
            total_iteration_iM_iN_no_diag = [0] * nP
            total_ILP_iteration_iM_iN_no_diag = [0] * nP
            for inP in range(nP):
                n_inP = nprops[inP]
                print("Computing data for t = "+str(iM)+" and nprops = "+str(n_inP))
                time_avg, timed_out_avg, total_iter_avg, total_ILP_avg, time_avg_std = run_iterations_random_graph(iM, iN, n_inP, Niter, False, KEY)
                # BUGFIX: store at index inP (the original used inP-1, which
                # put the first result into the last slot).
                time_arr_iM_iN_no_diag[inP] = time_avg
                timed_out_iM_iN_no_diag[inP] = timed_out_avg
                total_iteration_iM_iN_no_diag[inP] = total_iter_avg
                total_ILP_iteration_iM_iN_no_diag[inP] = total_ILP_avg
            time_arr_no_diag[ii] = time_arr_iM_iN_no_diag.copy()
            timed_out_num_no_diag[ii] = timed_out_iM_iN_no_diag.copy()
            total_iterations_no_diag[ii] = total_iteration_iM_iN_no_diag.copy()
            # BUGFIX: copy the ILP iteration counts (the original copied the
            # plain iteration counts into the ILP aggregate as well).
            total_iterations_ILP_no_diag[ii] = total_ILP_iteration_iM_iN_no_diag.copy()
        save_data_random_graph(KEY, M, flN, P, [time_arr_no_diag, timed_out_num_no_diag, total_iterations_no_diag, total_iterations_ILP_no_diag])
        fig, ax = plot_parametrized_figure_rg(KEY, M, flN, P, [time_arr_no_diag, timed_out_num_no_diag, total_iterations_no_diag, total_iterations_ILP_no_diag])
        FIG.append(fig)
        AX.append(ax)
    return FIG, AX
if __name__ == '__main__':
    # Entry point: run both benchmark suites without diagonal transitions
    # (diag=False), then display all generated matplotlib figures at once.
    print("Running all_augmenting paths simulations for random gridworlds")
    fig, ax = run_ALL(False)
    print("Running shortest_augmenting_paths simulations for random gridworlds")
    fig2, ax2 = run_SAPs(False)
    plt.show()
| 47.477401
| 310
| 0.594395
| 5,104
| 33,614
| 3.584835
| 0.056622
| 0.057714
| 0.022955
| 0.038258
| 0.91075
| 0.89818
| 0.876537
| 0.86342
| 0.859868
| 0.852599
| 0
| 0.012624
| 0.25061
| 33,614
| 708
| 311
| 47.477401
| 0.713736
| 0.184774
| 0
| 0.775986
| 0
| 0
| 0.050237
| 0.002572
| 0
| 0
| 0
| 0
| 0.010753
| 1
| 0.016129
| false
| 0
| 0.023297
| 0
| 0.051971
| 0.010753
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d1d32b0e5a8f6bf412a272c712160c608c3cda1d
| 163
|
py
|
Python
|
SPOJ/Random/AGS/gen.py
|
VastoLorde95/Competitive-Programming
|
6c990656178fb0cd33354cbe5508164207012f24
|
[
"MIT"
] | 170
|
2017-07-25T14:47:29.000Z
|
2022-01-26T19:16:31.000Z
|
SPOJ/Random/AGS/gen.py
|
navodit15/Competitive-Programming
|
6c990656178fb0cd33354cbe5508164207012f24
|
[
"MIT"
] | null | null | null |
SPOJ/Random/AGS/gen.py
|
navodit15/Competitive-Programming
|
6c990656178fb0cd33354cbe5508164207012f24
|
[
"MIT"
] | 55
|
2017-07-28T06:17:33.000Z
|
2021-10-31T03:06:22.000Z
|
from random import randrange
# NOTE(review): Python 2 syntax (print statements, xrange) — this test-case
# generator will not run under Python 3 without conversion.
# Emits a test-case count, then 100 cases each reusing the same three
# random values in [0, 100000000) plus the case index and the constant 10000.
print 100
a,b,c = randrange(100000000),randrange(100000000),randrange(100000000)
for i in xrange(1,101):
    print a,b,c
    print i, 10000
| 23.285714
| 70
| 0.766871
| 28
| 163
| 4.464286
| 0.607143
| 0.432
| 0.048
| 0.576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0.122699
| 163
| 6
| 71
| 27.166667
| 0.601399
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.166667
| null | null | 0.5
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ae7472849a54187a27cee0f5cbae547f9b8f9e5c
| 101
|
py
|
Python
|
utility.py
|
sethorpe/translinkautohw
|
3381df8a0e98d29b4850efc36345430682dbe4cd
|
[
"MIT-0"
] | null | null | null |
utility.py
|
sethorpe/translinkautohw
|
3381df8a0e98d29b4850efc36345430682dbe4cd
|
[
"MIT-0"
] | null | null | null |
utility.py
|
sethorpe/translinkautohw
|
3381df8a0e98d29b4850efc36345430682dbe4cd
|
[
"MIT-0"
] | null | null | null |
import datetime
def current_time():
    """Return the current local time as a 'HH_MM_SS_DD_MM_YYYY' string."""
    now = datetime.datetime.now()
    return now.strftime("%H_%M_%S_%d_%m_%Y")
| 25.25
| 64
| 0.722772
| 16
| 101
| 4.1875
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09901
| 101
| 4
| 64
| 25.25
| 0.736264
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
ae7b6ec133e883073900b73434496f10078720d1
| 148
|
py
|
Python
|
models/bart/__init__.py
|
launchnlp/cliff_summ
|
813a71f9ab8f379af9b47a784b34d6216e6c2b27
|
[
"Apache-2.0"
] | null | null | null |
models/bart/__init__.py
|
launchnlp/cliff_summ
|
813a71f9ab8f379af9b47a784b34d6216e6c2b27
|
[
"Apache-2.0"
] | null | null | null |
models/bart/__init__.py
|
launchnlp/cliff_summ
|
813a71f9ab8f379af9b47a784b34d6216e6c2b27
|
[
"Apache-2.0"
] | null | null | null |
from . import contrastive_translation
from . import contrastive_loss
from . import contrastive_translation_multi_neg
from . import constrative_bart
| 29.6
| 47
| 0.864865
| 18
| 148
| 6.777778
| 0.5
| 0.327869
| 0.516393
| 0.52459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 148
| 4
| 48
| 37
| 0.924242
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ae94af391bca7b8a3da61a18f4c73d791b8ac87e
| 260,121
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_syslog_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_syslog_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_syslog_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_infra_syslog_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR infra\-syslog package configuration.
This module contains definitions
for the following management objects\:
syslog\-service\: Syslog Timestamp Services
syslog\: syslog
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class FacilityEnum(Enum):
    """Syslog facility selector.

    Member values are multiples of 8 (kern=0, user=8, ..., sys14=232),
    covering kernel/user/mail/daemon system facilities, local0-local7,
    and the sys9-sys14 system facilities.
    """

    kern = 0
    user = 8
    mail = 16
    daemon = 24
    auth = 32
    syslog = 40
    lpr = 48
    news = 56
    uucp = 64
    cron = 72
    authpriv = 80
    ftp = 88
    local0 = 128
    local1 = 136
    local2 = 144
    local3 = 152
    local4 = 160
    local5 = 168
    local6 = 176
    local7 = 184
    sys9 = 192
    sys10 = 200
    sys11 = 208
    sys12 = 216
    sys13 = 224
    sys14 = 232

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['FacilityEnum']
class LogCollectFrequencyEnum(Enum):
    """How often logs are collected into files: weekly (1) or daily (2)."""

    weekly = 1
    daily = 2

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['LogCollectFrequencyEnum']
class LogMessageSeverityEnum(Enum):
    """Log message severity, emergency (0, most severe) through debug (7)."""

    emergency = 0
    alert = 1
    critical = 2
    error = 3
    warning = 4
    notice = 5
    informational = 6
    debug = 7

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['LogMessageSeverityEnum']
class LogSeverityEnum(Enum):
    """Log severity, emergency (0, most severe) through debug (7).

    Same numbering as LogMessageSeverityEnum; kept as a distinct type to
    match the YANG model it was generated from.
    """

    emergency = 0
    alert = 1
    critical = 2
    error = 3
    warning = 4
    notice = 5
    informational = 6
    debug = 7

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['LogSeverityEnum']
class LoggingDscpEnum(Enum):
    """Logging TOS discriminator: the single DSCP marker value (1)."""

    dscp = 1

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['LoggingDscpEnum']
class LoggingDscpValueEnum(Enum):
    """DSCP code points usable for syslog TOS marking.

    Covers default (0), the assured-forwarding classes af11-af43,
    expedited forwarding (ef=46), and class selectors cs1-cs7.
    """

    default = 0
    af11 = 10
    af12 = 12
    af13 = 14
    af21 = 18
    af22 = 20
    af23 = 22
    af31 = 26
    af32 = 28
    af33 = 30
    af41 = 34
    af42 = 36
    af43 = 38
    ef = 46
    cs1 = 8
    cs2 = 16
    cs3 = 24
    cs4 = 32
    cs5 = 40
    cs6 = 48
    cs7 = 56

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['LoggingDscpValueEnum']
class LoggingLevelsEnum(Enum):
    """Logging level selector: severities 0-7 plus disable (15)."""

    emergency = 0
    alert = 1
    critical = 2
    error = 3
    warning = 4
    notice = 5
    info = 6
    debug = 7
    disable = 15

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['LoggingLevelsEnum']
class LoggingPrecedenceEnum(Enum):
    """Logging TOS discriminator: the single precedence marker value (0)."""

    precedence = 0

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['LoggingPrecedenceEnum']
class LoggingPrecedenceValueEnum(Enum):
    """IP precedence values 0-7, routine (0) through network (7)."""

    routine = 0
    priority = 1
    immediate = 2
    flash = 3
    flash_override = 4
    critical = 5
    internet = 6
    network = 7

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['LoggingPrecedenceValueEnum']
class LoggingTosEnum(Enum):
    """Logging TOS type: precedence (0) or DSCP (1)."""

    precedence = 0
    dscp = 1

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['LoggingTosEnum']
class TimeInfoEnum(Enum):
    """Whether a time component is included: disable (0) excludes it,
    enable (1) includes it."""

    disable = 0
    enable = 1

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta tables load only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['TimeInfoEnum']
class SyslogService(object):
"""
Syslog Timestamp Services
.. attribute:: timestamps
Timestamp debug/log messages configuration
**type**\: :py:class:`Timestamps <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.SyslogService.Timestamps>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
self.timestamps = SyslogService.Timestamps()
self.timestamps.parent = self
class Timestamps(object):
"""
Timestamp debug/log messages configuration
.. attribute:: debug
Timestamp debug messages
**type**\: :py:class:`Debug <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.SyslogService.Timestamps.Debug>`
.. attribute:: enable
Enable timestamp debug/log messages
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: log
Timestamp log messages
**type**\: :py:class:`Log <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.SyslogService.Timestamps.Log>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
self.parent = None
self.debug = SyslogService.Timestamps.Debug()
self.debug.parent = self
self.enable = None
self.log = SyslogService.Timestamps.Log()
self.log.parent = self
class Log(object):
"""
Timestamp log messages
.. attribute:: log_datetime
Timestamp with date and time
**type**\: :py:class:`LogDatetime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.SyslogService.Timestamps.Log.LogDatetime>`
.. attribute:: log_timestamp_disable
Disable timestamp log messages
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: log_uptime
Timestamp with systime uptime
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
self.parent = None
self.log_datetime = SyslogService.Timestamps.Log.LogDatetime()
self.log_datetime.parent = self
self.log_timestamp_disable = None
self.log_uptime = None
class LogDatetime(object):
"""
Timestamp with date and time
.. attribute:: log_datetime_value
Set timestamp for log message
**type**\: :py:class:`LogDatetimeValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.SyslogService.Timestamps.Log.LogDatetime.LogDatetimeValue>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
self.parent = None
self.log_datetime_value = SyslogService.Timestamps.Log.LogDatetime.LogDatetimeValue()
self.log_datetime_value.parent = self
class LogDatetimeValue(object):
"""
Set timestamp for log message
.. attribute:: msec
Seconds
**type**\: :py:class:`TimeInfoEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.TimeInfoEnum>`
**units**\: second
**default value**\: enable
.. attribute:: time_stamp_value
Time
**type**\: :py:class:`TimeInfoEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.TimeInfoEnum>`
**default value**\: enable
.. attribute:: time_zone
Timezone
**type**\: :py:class:`TimeInfoEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.TimeInfoEnum>`
**default value**\: disable
.. attribute:: year
Year
**type**\: :py:class:`TimeInfoEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.TimeInfoEnum>`
**default value**\: disable
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
self.parent = None
self.msec = None
self.time_stamp_value = None
self.time_zone = None
self.year = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-syslog-cfg:syslog-service/Cisco-IOS-XR-infra-syslog-cfg:timestamps/Cisco-IOS-XR-infra-syslog-cfg:log/Cisco-IOS-XR-infra-syslog-cfg:log-datetime/Cisco-IOS-XR-infra-syslog-cfg:log-datetime-value'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.msec is not None:
return True
if self.time_stamp_value is not None:
return True
if self.time_zone is not None:
return True
if self.year is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
return meta._meta_table['SyslogService.Timestamps.Log.LogDatetime.LogDatetimeValue']['meta_info']
@property
def _common_path(self):
    # Fixed absolute path of the log-datetime container.
    return '/Cisco-IOS-XR-infra-syslog-cfg:syslog-service/Cisco-IOS-XR-infra-syslog-cfg:timestamps/Cisco-IOS-XR-infra-syslog-cfg:log/Cisco-IOS-XR-infra-syslog-cfg:log-datetime'

def is_config(self):
    """Return True: this node holds configuration data."""
    return True

def _has_data(self):
    """Return True when the child container carries data."""
    if not self.is_config():
        return False
    child = self.log_datetime_value
    return child is not None and child._has_data()

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
    return meta._meta_table['SyslogService.Timestamps.Log.LogDatetime']['meta_info']
@property
def _common_path(self):
    # Fixed absolute path of the log container.
    return '/Cisco-IOS-XR-infra-syslog-cfg:syslog-service/Cisco-IOS-XR-infra-syslog-cfg:timestamps/Cisco-IOS-XR-infra-syslog-cfg:log'

def is_config(self):
    """Return True: this node holds configuration data."""
    return True

def _has_data(self):
    """Return True when any child of the log container is set."""
    if not self.is_config():
        return False
    if self.log_datetime is not None and self.log_datetime._has_data():
        return True
    return self.log_timestamp_disable is not None or self.log_uptime is not None

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
    return meta._meta_table['SyslogService.Timestamps.Log']['meta_info']
class Debug(object):
    """
    Timestamp debug messages.

    Attributes:
        debug_datetime: timestamp with date and time (DebugDatetime container).
        debug_timestamp_disable: disable timestamp debug messages (Empty leaf).
        debug_uptime: timestamp with systime uptime (Empty leaf).
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        datetime_container = SyslogService.Timestamps.Debug.DebugDatetime()
        datetime_container.parent = self
        self.debug_datetime = datetime_container
        self.debug_timestamp_disable = None
        self.debug_uptime = None

    class DebugDatetime(object):
        """
        Timestamp with date and time.

        Attributes:
            datetime_value: time format for debug messages (DatetimeValue container).
        """

        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            self.parent = None
            value_container = SyslogService.Timestamps.Debug.DebugDatetime.DatetimeValue()
            value_container.parent = self
            self.datetime_value = value_container

        class DatetimeValue(object):
            """
            Time format for debug messages.

            Attributes (all TimeInfoEnum leaves; None until configured):
                msec: seconds/milliseconds component; default 'enable'.
                time_stamp_value: time; default 'enable'.
                time_zone: timezone; default 'disable'.
                year: year; default 'disable'.
            """

            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                self.msec = None
                self.time_stamp_value = None
                self.time_zone = None
                self.year = None

            @property
            def _common_path(self):
                # Absolute schema path; no keyed list ancestor.
                return '/Cisco-IOS-XR-infra-syslog-cfg:syslog-service/Cisco-IOS-XR-infra-syslog-cfg:timestamps/Cisco-IOS-XR-infra-syslog-cfg:debug/Cisco-IOS-XR-infra-syslog-cfg:debug-datetime/Cisco-IOS-XR-infra-syslog-cfg:datetime-value'

            def is_config(self):
                """Return True: this node holds configuration data."""
                return True

            def _has_data(self):
                """Return True when any leaf of this container is set."""
                if not self.is_config():
                    return False
                leaves = (self.msec, self.time_stamp_value, self.time_zone, self.year)
                return any(leaf is not None for leaf in leaves)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['SyslogService.Timestamps.Debug.DebugDatetime.DatetimeValue']['meta_info']

        @property
        def _common_path(self):
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog-service/Cisco-IOS-XR-infra-syslog-cfg:timestamps/Cisco-IOS-XR-infra-syslog-cfg:debug/Cisco-IOS-XR-infra-syslog-cfg:debug-datetime'

        def is_config(self):
            """Return True: this node holds configuration data."""
            return True

        def _has_data(self):
            """Return True when the child container carries data."""
            if not self.is_config():
                return False
            child = self.datetime_value
            return child is not None and child._has_data()

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['SyslogService.Timestamps.Debug.DebugDatetime']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog-service/Cisco-IOS-XR-infra-syslog-cfg:timestamps/Cisco-IOS-XR-infra-syslog-cfg:debug'

    def is_config(self):
        """Return True: this node holds configuration data."""
        return True

    def _has_data(self):
        """Return True when any child of the debug container is set."""
        if not self.is_config():
            return False
        if self.debug_datetime is not None and self.debug_datetime._has_data():
            return True
        return (self.debug_timestamp_disable is not None
                or self.debug_uptime is not None)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['SyslogService.Timestamps.Debug']['meta_info']
@property
def _common_path(self):
    # Fixed absolute path of the timestamps container.
    return '/Cisco-IOS-XR-infra-syslog-cfg:syslog-service/Cisco-IOS-XR-infra-syslog-cfg:timestamps'

def is_config(self):
    """Return True: this node holds configuration data."""
    return True

def _has_data(self):
    """Return True when any child of the timestamps container is set."""
    if not self.is_config():
        return False
    if self.debug is not None and self.debug._has_data():
        return True
    if self.enable is not None:
        return True
    return self.log is not None and self.log._has_data()

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
    return meta._meta_table['SyslogService.Timestamps']['meta_info']
@property
def _common_path(self):
    # Root path of the syslog-service model.
    return '/Cisco-IOS-XR-infra-syslog-cfg:syslog-service'

def is_config(self):
    """Return True: this node holds configuration data."""
    return True

def _has_data(self):
    """Return True when the timestamps child carries data."""
    if not self.is_config():
        return False
    child = self.timestamps
    return child is not None and child._has_data()

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
    return meta._meta_table['SyslogService']['meta_info']
class Syslog(object):
"""
syslog
.. attribute:: alarm_logger
Alarm Logger Properties
**type**\: :py:class:`AlarmLogger <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.AlarmLogger>`
.. attribute:: archive
Archive attributes configuration
**type**\: :py:class:`Archive <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Archive>`
.. attribute:: buffered_logging
Set buffered logging parameters
**type**\: :py:class:`BufferedLogging <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.BufferedLogging>`
.. attribute:: console_logging
Set console logging
**type**\: :py:class:`ConsoleLogging <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.ConsoleLogging>`
.. attribute:: correlator
Configure properties of the event correlator
**type**\: :py:class:`Correlator <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator>`
.. attribute:: enable_console_logging
Enabled or disabled
**type**\: bool
.. attribute:: files
Configure logging file destination
**type**\: :py:class:`Files <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Files>`
.. attribute:: history_logging
Set history logging
**type**\: :py:class:`HistoryLogging <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HistoryLogging>`
.. attribute:: host_name_prefix
Hostname prefix to add on msgs to servers
**type**\: str
.. attribute:: host_server
Configure logging host
**type**\: :py:class:`HostServer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer>`
.. attribute:: ipv4
Syslog TOS bit for outgoing messages
**type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Ipv4>`
.. attribute:: ipv6
Syslog traffic class bit for outgoing messages
**type**\: :py:class:`Ipv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Ipv6>`
.. attribute:: local_log_file_size
Set size of the local log file
**type**\: int
**range:** 0..4294967295
**default value**\: 32768
.. attribute:: logging_facilities
Modify message logging facilities
**type**\: :py:class:`LoggingFacilities <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.LoggingFacilities>`
.. attribute:: monitor_logging
Set monitor logging
**type**\: :py:class:`MonitorLogging <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.MonitorLogging>`
.. attribute:: source_interface_table
Configure source interface
**type**\: :py:class:`SourceInterfaceTable <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.SourceInterfaceTable>`
.. attribute:: suppress_duplicates
Suppress consecutive duplicate messages
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: suppression
Configure properties of the syslog/alarm suppression
**type**\: :py:class:`Suppression <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Suppression>`
.. attribute:: trap_logging
Set trap logging
**type**\: :py:class:`TrapLogging <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.TrapLogging>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
    """Create the syslog root node and attach every child container."""
    def _adopt(child):
        # Parent each generated child container back to this node.
        child.parent = self
        return child

    self.alarm_logger = _adopt(Syslog.AlarmLogger())
    self.archive = _adopt(Syslog.Archive())
    self.buffered_logging = _adopt(Syslog.BufferedLogging())
    self.console_logging = _adopt(Syslog.ConsoleLogging())
    self.correlator = _adopt(Syslog.Correlator())
    self.enable_console_logging = None
    self.files = _adopt(Syslog.Files())
    self.history_logging = _adopt(Syslog.HistoryLogging())
    self.host_name_prefix = None
    self.host_server = _adopt(Syslog.HostServer())
    self.ipv4 = _adopt(Syslog.Ipv4())
    self.ipv6 = _adopt(Syslog.Ipv6())
    self.local_log_file_size = None
    self.logging_facilities = _adopt(Syslog.LoggingFacilities())
    self.monitor_logging = _adopt(Syslog.MonitorLogging())
    self.source_interface_table = _adopt(Syslog.SourceInterfaceTable())
    self.suppress_duplicates = None
    self.suppression = _adopt(Syslog.Suppression())
    self.trap_logging = _adopt(Syslog.TrapLogging())
class MonitorLogging(object):
    """
    Set monitor logging.

    Attributes:
        logging_level: monitor logging level (LoggingLevelsEnum); default 'debug'.
        monitor_discriminator: monitor logging discriminators (container).
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        self.logging_level = None
        discriminator = Syslog.MonitorLogging.MonitorDiscriminator()
        discriminator.parent = self
        self.monitor_discriminator = discriminator

    class MonitorDiscriminator(object):
        """
        Monitor logging discriminators; each leaf is a free-form string.

        Attributes:
            match1, match2, match3: match discriminators.
            nomatch1, nomatch2, nomatch3: no-match discriminators.
        """

        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            self.parent = None
            self.match1 = None
            self.match2 = None
            self.match3 = None
            self.nomatch1 = None
            self.nomatch2 = None
            self.nomatch3 = None

        @property
        def _common_path(self):
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:monitor-logging/Cisco-IOS-XR-infra-syslog-cfg:monitor-discriminator'

        def is_config(self):
            """Return True: this node holds configuration data."""
            return True

        def _has_data(self):
            """Return True when any discriminator leaf is set."""
            if not self.is_config():
                return False
            leaves = (self.match1, self.match2, self.match3,
                      self.nomatch1, self.nomatch2, self.nomatch3)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.MonitorLogging.MonitorDiscriminator']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:monitor-logging'

    def is_config(self):
        """Return True: this node holds configuration data."""
        return True

    def _has_data(self):
        """Return True when the level leaf or the discriminator child is set."""
        if not self.is_config():
            return False
        if self.logging_level is not None:
            return True
        child = self.monitor_discriminator
        return child is not None and child._has_data()

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.MonitorLogging']['meta_info']
class HistoryLogging(object):
    """
    Set history logging.

    Attributes:
        history_size: logging history size (int, 1..500); default 1.
        logging_level: history logging level (LoggingLevelsEnum); default 'warning'.
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        self.history_size = None
        self.logging_level = None

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:history-logging'

    def is_config(self):
        """Return True: this node holds configuration data."""
        return True

    def _has_data(self):
        """Return True when either leaf is set."""
        if not self.is_config():
            return False
        return self.history_size is not None or self.logging_level is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.HistoryLogging']['meta_info']
class LoggingFacilities(object):
    """
    Modify message logging facilities.

    Attributes:
        facility_level: facility from which logging is done (FacilityEnum);
            default 'local7'.
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        self.facility_level = None

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:logging-facilities'

    def is_config(self):
        """Return True: this node holds configuration data."""
        return True

    def _has_data(self):
        """Return True when the facility leaf is set."""
        if not self.is_config():
            return False
        return self.facility_level is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.LoggingFacilities']['meta_info']
class TrapLogging(object):
    """
    Set trap logging.

    Attributes:
        logging_level: trap logging level (LoggingLevelsEnum); default 'info'.
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        self.logging_level = None

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:trap-logging'

    def is_config(self):
        """Return True: this node holds configuration data."""
        return True

    def _has_data(self):
        """Return True when the level leaf is set."""
        if not self.is_config():
            return False
        return self.logging_level is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.TrapLogging']['meta_info']
class BufferedLogging(object):
    """
    Set buffered logging parameters.

    Attributes:
        buffer_size: logging buffer size (int, 4096..4294967295); default 2097152.
        buffered_discriminator: buffered logging discriminators (container).
        logging_level: level for buffered logging (LoggingLevelsEnum);
            default 'debug'.
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        self.buffer_size = None
        discriminator = Syslog.BufferedLogging.BufferedDiscriminator()
        discriminator.parent = self
        self.buffered_discriminator = discriminator
        self.logging_level = None

    class BufferedDiscriminator(object):
        """
        Buffered logging discriminators; each leaf is a free-form string.

        Attributes:
            match1, match2, match3: match discriminators.
            nomatch1, nomatch2, nomatch3: no-match discriminators.
        """

        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            self.parent = None
            self.match1 = None
            self.match2 = None
            self.match3 = None
            self.nomatch1 = None
            self.nomatch2 = None
            self.nomatch3 = None

        @property
        def _common_path(self):
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:buffered-logging/Cisco-IOS-XR-infra-syslog-cfg:buffered-discriminator'

        def is_config(self):
            """Return True: this node holds configuration data."""
            return True

        def _has_data(self):
            """Return True when any discriminator leaf is set."""
            if not self.is_config():
                return False
            leaves = (self.match1, self.match2, self.match3,
                      self.nomatch1, self.nomatch2, self.nomatch3)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.BufferedLogging.BufferedDiscriminator']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:buffered-logging'

    def is_config(self):
        """Return True: this node holds configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf or the discriminator child is set."""
        if not self.is_config():
            return False
        if self.buffer_size is not None or self.logging_level is not None:
            return True
        child = self.buffered_discriminator
        return child is not None and child._has_data()

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.BufferedLogging']['meta_info']
class HostServer(object):
"""
Configure logging host
.. attribute:: vrfs
VRF table
**type**\: :py:class:`Vrfs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
    """Build the host-server node and attach the VRF table."""
    self.parent = None
    vrf_table = Syslog.HostServer.Vrfs()
    vrf_table.parent = self
    self.vrfs = vrf_table
class Vrfs(object):
"""
VRF table
.. attribute:: vrf
VRF specific data
**type**\: list of :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
    """Build the VRF table with an empty keyed list of VRF entries."""
    self.parent = None
    vrf_list = YList()
    vrf_list.parent = self
    vrf_list.name = 'vrf'
    self.vrf = vrf_list
class Vrf(object):
"""
VRF specific data
.. attribute:: vrf_name <key>
Name of the VRF instance
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: hosts
List of the logging host
**type**\: :py:class:`Hosts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Hosts>`
.. attribute:: ipv4s
List of the IPv4 logging host
**type**\: :py:class:`Ipv4S <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Ipv4S>`
.. attribute:: ipv6s
List of the IPv6 logging host
**type**\: :py:class:`Ipv6S <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Ipv6S>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
    """Build a VRF entry and attach its logging-host containers."""
    def _adopt(child):
        # Parent each child container back to this entry.
        child.parent = self
        return child

    self.parent = None
    self.vrf_name = None  # list key: name of the VRF instance
    self.hosts = _adopt(Syslog.HostServer.Vrfs.Vrf.Hosts())
    self.ipv4s = _adopt(Syslog.HostServer.Vrfs.Vrf.Ipv4S())
    self.ipv6s = _adopt(Syslog.HostServer.Vrfs.Vrf.Ipv6S())
class Ipv6S(object):
    """
    List of the IPv6 logging hosts.

    Attributes:
        ipv6: keyed list of Ipv6 logging-host entries (YList).
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        entries = YList()
        entries.parent = self
        entries.name = 'ipv6'
        self.ipv6 = entries

    class Ipv6(object):
        """
        IPv6 address of the logging host.

        Attributes:
            address: list key; IPv6 address of the logging host (str).
            ipv6_discriminator: IPv6 logging discriminators (container).
            ipv6_severity_levels: severity container (status: obsolete).
            ipv6_severity_port: severity/port for the logging host (container).
        """

        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            def _adopt(child):
                # Parent each child container back to this entry.
                child.parent = self
                return child

            self.parent = None
            self.address = None  # list key
            self.ipv6_discriminator = _adopt(Syslog.HostServer.Vrfs.Vrf.Ipv6S.Ipv6.Ipv6Discriminator())
            self.ipv6_severity_levels = _adopt(Syslog.HostServer.Vrfs.Vrf.Ipv6S.Ipv6.Ipv6SeverityLevels())
            self.ipv6_severity_port = _adopt(Syslog.HostServer.Vrfs.Vrf.Ipv6S.Ipv6.Ipv6SeverityPort())

        class Ipv6SeverityPort(object):
            """
            Severity/Port for the logging host.

            Attributes:
                port: port for the logging host (int); default 514.
                severity: severity for the logging host (int); default 6.
            """

            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                self.port = None
                self.severity = None

            @property
            def _common_path(self):
                parent = self.parent
                if parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return parent._common_path + '/Cisco-IOS-XR-infra-syslog-cfg:ipv6-severity-port'

            def is_config(self):
                """Return True: this node holds configuration data."""
                return True

            def _has_data(self):
                """Return True when either leaf is set."""
                if not self.is_config():
                    return False
                return self.port is not None or self.severity is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv6S.Ipv6.Ipv6SeverityPort']['meta_info']

        class Ipv6Discriminator(object):
            """
            IPv6 logging discriminators; each leaf is a free-form string.

            Attributes:
                match1, match2, match3: match discriminators.
                nomatch1, nomatch2, nomatch3: no-match discriminators.
            """

            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                self.match1 = None
                self.match2 = None
                self.match3 = None
                self.nomatch1 = None
                self.nomatch2 = None
                self.nomatch3 = None

            @property
            def _common_path(self):
                parent = self.parent
                if parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return parent._common_path + '/Cisco-IOS-XR-infra-syslog-cfg:ipv6-discriminator'

            def is_config(self):
                """Return True: this node holds configuration data."""
                return True

            def _has_data(self):
                """Return True when any discriminator leaf is set."""
                if not self.is_config():
                    return False
                leaves = (self.match1, self.match2, self.match3,
                          self.nomatch1, self.nomatch2, self.nomatch3)
                return any(leaf is not None for leaf in leaves)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv6S.Ipv6.Ipv6Discriminator']['meta_info']

        class Ipv6SeverityLevels(object):
            """
            Severity container of the logging host (status: obsolete).

            Attributes:
                ipv6_severity_level: keyed list of severity entries (YList).
            """

            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                entries = YList()
                entries.parent = self
                entries.name = 'ipv6_severity_level'
                self.ipv6_severity_level = entries

            class Ipv6SeverityLevel(object):
                """
                Severity for the logging host.

                Attributes:
                    severity: list key; severity for the logging host
                        (LogSeverityEnum).
                """

                _prefix = 'infra-syslog-cfg'
                _revision = '2016-06-22'

                def __init__(self):
                    self.parent = None
                    self.severity = None

                @property
                def _common_path(self):
                    parent = self.parent
                    if parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    if self.severity is None:
                        raise YPYModelError('Key property severity is None')
                    return '%s/Cisco-IOS-XR-infra-syslog-cfg:ipv6-severity-level[Cisco-IOS-XR-infra-syslog-cfg:severity = %s]' % (parent._common_path, self.severity)

                def is_config(self):
                    """Return True: this node holds configuration data."""
                    return True

                def _has_data(self):
                    """Return True when the severity key is set."""
                    if not self.is_config():
                        return False
                    return self.severity is not None

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                    return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv6S.Ipv6.Ipv6SeverityLevels.Ipv6SeverityLevel']['meta_info']

            @property
            def _common_path(self):
                parent = self.parent
                if parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return parent._common_path + '/Cisco-IOS-XR-infra-syslog-cfg:ipv6-severity-levels'

            def is_config(self):
                """Return True: this node holds configuration data."""
                return True

            def _has_data(self):
                """Return True when any list entry carries data."""
                if not self.is_config():
                    return False
                entries = self.ipv6_severity_level
                if entries is None:
                    return False
                return any(entry._has_data() for entry in entries)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv6S.Ipv6.Ipv6SeverityLevels']['meta_info']

        @property
        def _common_path(self):
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.address is None:
                raise YPYModelError('Key property address is None')
            return '%s/Cisco-IOS-XR-infra-syslog-cfg:ipv6[Cisco-IOS-XR-infra-syslog-cfg:address = %s]' % (parent._common_path, self.address)

        def is_config(self):
            """Return True: this node holds configuration data."""
            return True

        def _has_data(self):
            """Return True when the key or any child container is set."""
            if not self.is_config():
                return False
            if self.address is not None:
                return True
            children = (self.ipv6_discriminator,
                        self.ipv6_severity_levels,
                        self.ipv6_severity_port)
            return any(c is not None and c._has_data() for c in children)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv6S.Ipv6']['meta_info']

    @property
    def _common_path(self):
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-infra-syslog-cfg:ipv6s'

    def is_config(self):
        """Return True: this node holds configuration data."""
        return True

    def _has_data(self):
        """Return True when any list entry carries data."""
        if not self.is_config():
            return False
        entries = self.ipv6
        if entries is None:
            return False
        return any(entry._has_data() for entry in entries)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv6S']['meta_info']
class Hosts(object):
"""
List of the logging host
.. attribute:: host
Name of the logging host
**type**\: list of :py:class:`Host <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Hosts.Host>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
    """Build the host table with an empty keyed list of host entries."""
    self.parent = None
    host_list = YList()
    host_list.parent = self
    host_list.name = 'host'
    self.host = host_list
class Host(object):
"""
Name of the logging host
.. attribute:: host_name <key>
Name of the logging host
**type**\: str
.. attribute:: host_name_discriminator
Set Hostname logging discriminators
**type**\: :py:class:`HostNameDiscriminator <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostNameDiscriminator>`
.. attribute:: host_name_severities
Severity container of the logging host
**type**\: :py:class:`HostNameSeverities <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostNameSeverities>`
**status**\: obsolete
.. attribute:: host_severity_port
Severity/Port for the logging host
**type**\: :py:class:`HostSeverityPort <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostSeverityPort>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
self.parent = None
self.host_name = None
self.host_name_discriminator = Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostNameDiscriminator()
self.host_name_discriminator.parent = self
self.host_name_severities = Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostNameSeverities()
self.host_name_severities.parent = self
self.host_severity_port = Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostSeverityPort()
self.host_severity_port.parent = self
class HostNameSeverities(object):
"""
Severity container of the logging host
.. attribute:: host_name_severity
Severity for the logging host
**type**\: list of :py:class:`HostNameSeverity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostNameSeverities.HostNameSeverity>`
**status**\: obsolete
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
self.parent = None
self.host_name_severity = YList()
self.host_name_severity.parent = self
self.host_name_severity.name = 'host_name_severity'
class HostNameSeverity(object):
"""
Severity for the logging host
.. attribute:: severity <key>
Severity for the logging host
**type**\: :py:class:`LogSeverityEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.LogSeverityEnum>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
self.parent = None
self.severity = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.severity is None:
raise YPYModelError('Key property severity is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:host-name-severity[Cisco-IOS-XR-infra-syslog-cfg:severity = ' + str(self.severity) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.severity is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostNameSeverities.HostNameSeverity']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:host-name-severities'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.host_name_severity is not None:
for child_ref in self.host_name_severity:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostNameSeverities']['meta_info']
class HostNameDiscriminator(object):
"""
Set Hostname logging discriminators
.. attribute:: match1
Set hostname logging match1 discriminator
**type**\: str
.. attribute:: match2
Set hostname logging match2 discriminator
**type**\: str
.. attribute:: match3
Set hostname logging match3 discriminator
**type**\: str
.. attribute:: nomatch1
Set hostname logging no\-match1 discriminator
**type**\: str
.. attribute:: nomatch2
Set hostname logging no\-match2 discriminator
**type**\: str
.. attribute:: nomatch3
Set hostname logging no\-match3 discriminator
**type**\: str
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
self.parent = None
self.match1 = None
self.match2 = None
self.match3 = None
self.nomatch1 = None
self.nomatch2 = None
self.nomatch3 = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:host-name-discriminator'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.match1 is not None:
return True
if self.match2 is not None:
return True
if self.match3 is not None:
return True
if self.nomatch1 is not None:
return True
if self.nomatch2 is not None:
return True
if self.nomatch3 is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostNameDiscriminator']['meta_info']
class HostSeverityPort(object):
"""
Severity/Port for the logging host
.. attribute:: port
Port for the logging host
**type**\: int
**range:** \-2147483648..2147483647
**default value**\: 514
.. attribute:: severity
Severity for the logging host
**type**\: int
**range:** \-2147483648..2147483647
**default value**\: 6
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
self.parent = None
self.port = None
self.severity = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:host-severity-port'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.port is not None:
return True
if self.severity is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Hosts.Host.HostSeverityPort']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.host_name is None:
raise YPYModelError('Key property host_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:host[Cisco-IOS-XR-infra-syslog-cfg:host-name = ' + str(self.host_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.host_name is not None:
return True
if self.host_name_discriminator is not None and self.host_name_discriminator._has_data():
return True
if self.host_name_severities is not None and self.host_name_severities._has_data():
return True
if self.host_severity_port is not None and self.host_severity_port._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Hosts.Host']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:hosts'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.host is not None:
for child_ref in self.host:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Hosts']['meta_info']
# Auto-generated YDK binding class: container of IPv4 logging hosts under a VRF.
class Ipv4S(object):
    """
    List of the IPv4 logging host

    .. attribute:: ipv4
        IPv4 address of the logging host
        **type**\: list of :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4>`
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        # YList keeps each child Ipv4 entry parented to this container.
        self.ipv4 = YList()
        self.ipv4.parent = self
        self.ipv4.name = 'ipv4'


    class Ipv4(object):
        """
        IPv4 address of the logging host

        .. attribute:: address <key>
            IPv4 address of the logging host
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        .. attribute:: ipv4_discriminator
            Set IPv4 logging discriminators
            **type**\: :py:class:`Ipv4Discriminator <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4Discriminator>`

        .. attribute:: ipv4_severity_levels
            Severity container of the logging host
            **type**\: :py:class:`Ipv4SeverityLevels <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4SeverityLevels>`
            **status**\: obsolete

        .. attribute:: ipv4_severity_port
            Severity/Port for the logging host
            **type**\: :py:class:`Ipv4SeverityPort <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4SeverityPort>`
        """

        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            self.parent = None
            self.address = None
            # Child containers are always instantiated and parented here.
            self.ipv4_discriminator = Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4Discriminator()
            self.ipv4_discriminator.parent = self
            self.ipv4_severity_levels = Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4SeverityLevels()
            self.ipv4_severity_levels.parent = self
            self.ipv4_severity_port = Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4SeverityPort()
            self.ipv4_severity_port.parent = self


        class Ipv4SeverityLevels(object):
            """
            Severity container of the logging host

            .. attribute:: ipv4_severity_level
                Severity for the logging host
                **type**\: list of :py:class:`Ipv4SeverityLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4SeverityLevels.Ipv4SeverityLevel>`
                **status**\: obsolete
            """

            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                self.ipv4_severity_level = YList()
                self.ipv4_severity_level.parent = self
                self.ipv4_severity_level.name = 'ipv4_severity_level'


            class Ipv4SeverityLevel(object):
                """
                Severity for the logging host

                .. attribute:: severity <key>
                    Severity for the logging host
                    **type**\: :py:class:`LogSeverityEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.LogSeverityEnum>`
                """

                _prefix = 'infra-syslog-cfg'
                _revision = '2016-06-22'

                def __init__(self):
                    self.parent = None
                    self.severity = None

                @property
                def _common_path(self):
                    # List entry: path needs both a parent and the key leaf.
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    if self.severity is None:
                        raise YPYModelError('Key property severity is None')
                    return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:ipv4-severity-level[Cisco-IOS-XR-infra-syslog-cfg:severity = ' + str(self.severity) + ']'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    if self.severity is not None:
                        return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                    return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4SeverityLevels.Ipv4SeverityLevel']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:ipv4-severity-levels'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # Non-empty when any child list entry carries data.
                if not self.is_config():
                    return False
                if self.ipv4_severity_level is not None:
                    for child_ref in self.ipv4_severity_level:
                        if child_ref._has_data():
                            return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4SeverityLevels']['meta_info']


        class Ipv4SeverityPort(object):
            """
            Severity/Port for the logging host

            .. attribute:: port
                Port for the logging host
                **type**\: int
                **range:** \-2147483648..2147483647
                **default value**\: 514

            .. attribute:: severity
                Severity for the logging host
                **type**\: int
                **range:** \-2147483648..2147483647
                **default value**\: 6
            """

            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                self.port = None
                self.severity = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:ipv4-severity-port'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.port is not None:
                    return True
                if self.severity is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4SeverityPort']['meta_info']


        class Ipv4Discriminator(object):
            """
            Set IPv4 logging discriminators

            .. attribute:: match1
                Set IPv4 logging match1 discriminator
                **type**\: str

            .. attribute:: match2
                Set IPv4 logging match2 discriminator
                **type**\: str

            .. attribute:: match3
                Set IPv4 logging match3 discriminator
                **type**\: str

            .. attribute:: nomatch1
                Set IPv4 logging no\-match1 discriminator
                **type**\: str

            .. attribute:: nomatch2
                Set IPv4 logging no\-match2 discriminator
                **type**\: str

            .. attribute:: nomatch3
                Set IPv4 logging no\-match3 discriminator
                **type**\: str
            """

            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                self.match1 = None
                self.match2 = None
                self.match3 = None
                self.nomatch1 = None
                self.nomatch2 = None
                self.nomatch3 = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:ipv4-discriminator'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # Any configured discriminator leaf makes this container non-empty.
                if not self.is_config():
                    return False
                if self.match1 is not None:
                    return True
                if self.match2 is not None:
                    return True
                if self.match3 is not None:
                    return True
                if self.nomatch1 is not None:
                    return True
                if self.nomatch2 is not None:
                    return True
                if self.nomatch3 is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4.Ipv4Discriminator']['meta_info']

        @property
        def _common_path(self):
            # Keyed list entry under the ipv4s container.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.address is None:
                raise YPYModelError('Key property address is None')
            return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:ipv4[Cisco-IOS-XR-infra-syslog-cfg:address = ' + str(self.address) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.address is not None:
                return True
            if self.ipv4_discriminator is not None and self.ipv4_discriminator._has_data():
                return True
            if self.ipv4_severity_levels is not None and self.ipv4_severity_levels._has_data():
                return True
            if self.ipv4_severity_port is not None and self.ipv4_severity_port._has_data():
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv4S.Ipv4']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:ipv4s'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.ipv4 is not None:
            for child_ref in self.ipv4:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.HostServer.Vrfs.Vrf.Ipv4S']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this VRF list entry, keyed by vrf_name."""
    key = self.vrf_name
    if key is None:
        raise YPYModelError('Key property vrf_name is None')
    return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:host-server/Cisco-IOS-XR-infra-syslog-cfg:vrfs/Cisco-IOS-XR-infra-syslog-cfg:vrf[Cisco-IOS-XR-infra-syslog-cfg:vrf-name = ' + str(key) + ']'
def is_config(self):
    """Return True: this node always represents configuration data."""
    return True
def _has_data(self):
    """True when the vrf_name key or any child container holds data."""
    if not self.is_config():
        return False
    if self.vrf_name is not None:
        return True
    # Same order as the generated original: hosts, ipv4s, ipv6s.
    for child in (self.hosts, self.ipv4s, self.ipv6s):
        if child is not None and child._has_data():
            return True
    return False
@staticmethod
def _meta_info():
    # Deferred import keeps the large generated meta module off the import path
    # until schema metadata is requested.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
    return meta._meta_table['Syslog.HostServer.Vrfs.Vrf']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of the vrfs container; fixed, no key or parent needed."""
    return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:host-server/Cisco-IOS-XR-infra-syslog-cfg:vrfs'
def is_config(self):
    """Return True: this node always represents configuration data."""
    return True
def _has_data(self):
    """Report whether any child vrf list entry carries data."""
    if not self.is_config():
        return False
    entries = self.vrf
    if entries is not None:
        return any(entry._has_data() for entry in entries)
    return False
@staticmethod
def _meta_info():
    # Deferred import of the generated meta module (loaded on demand only).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
    return meta._meta_table['Syslog.HostServer.Vrfs']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of the host-server container; fixed, no parent needed."""
    return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:host-server'
def is_config(self):
    """Return True: this node always represents configuration data."""
    return True
def _has_data(self):
    """True when the vrfs child container carries any data."""
    if not self.is_config():
        return False
    vrfs = self.vrfs
    if vrfs is not None and vrfs._has_data():
        return True
    return False
@staticmethod
def _meta_info():
    # Deferred import of the generated meta module (loaded on demand only).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
    return meta._meta_table['Syslog.HostServer']['meta_info']
# Auto-generated YDK binding class: console logging destination settings.
class ConsoleLogging(object):
    """
    Set console logging

    .. attribute:: console_discriminator
        Set console logging discriminators
        **type**\: :py:class:`ConsoleDiscriminator <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.ConsoleLogging.ConsoleDiscriminator>`

    .. attribute:: logging_level
        Console logging level
        **type**\: :py:class:`LoggingLevelsEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.LoggingLevelsEnum>`
        **default value**\: warning
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        # Child container is always instantiated and parented here.
        self.console_discriminator = Syslog.ConsoleLogging.ConsoleDiscriminator()
        self.console_discriminator.parent = self
        self.logging_level = None


    class ConsoleDiscriminator(object):
        """
        Set console logging discriminators

        .. attribute:: match1
            Set console logging match1 discriminator
            **type**\: str

        .. attribute:: match2
            Set console logging match2 discriminator
            **type**\: str

        .. attribute:: match3
            Set console logging match3 discriminator
            **type**\: str

        .. attribute:: nomatch1
            Set console logging no\-match1 discriminator
            **type**\: str

        .. attribute:: nomatch2
            Set console logging no\-match2 discriminator
            **type**\: str

        .. attribute:: nomatch3
            Set console logging no\-match3 discriminator
            **type**\: str
        """

        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            self.parent = None
            self.match1 = None
            self.match2 = None
            self.match3 = None
            self.nomatch1 = None
            self.nomatch2 = None
            self.nomatch3 = None

        @property
        def _common_path(self):
            # Path is fixed from the model root; no parent required.
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:console-logging/Cisco-IOS-XR-infra-syslog-cfg:console-discriminator'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # Any configured discriminator leaf makes this container non-empty.
            if not self.is_config():
                return False
            if self.match1 is not None:
                return True
            if self.match2 is not None:
                return True
            if self.match3 is not None:
                return True
            if self.nomatch1 is not None:
                return True
            if self.nomatch2 is not None:
                return True
            if self.nomatch3 is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.ConsoleLogging.ConsoleDiscriminator']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:console-logging'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.console_discriminator is not None and self.console_discriminator._has_data():
            return True
        if self.logging_level is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.ConsoleLogging']['meta_info']
# Auto-generated YDK binding class: logging-file destination configuration.
class Files(object):
    """
    Configure logging file destination

    .. attribute:: file
        Specify File Name
        **type**\: list of :py:class:`File <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Files.File>`
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        # YList keeps each child File entry parented to this container.
        self.file = YList()
        self.file.parent = self
        self.file.name = 'file'


    class File(object):
        """
        Specify File Name

        .. attribute:: file_name <key>
            Name of the file
            **type**\: str
            **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

        .. attribute:: file_log_attributes
            Attributes of the logging file destination
            **type**\: :py:class:`FileLogAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Files.File.FileLogAttributes>`

        .. attribute:: file_log_discriminator
            Set File logging discriminators
            **type**\: :py:class:`FileLogDiscriminator <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Files.File.FileLogDiscriminator>`

        .. attribute:: file_specification
            Specifications of the logging file destination
            **type**\: :py:class:`FileSpecification <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Files.File.FileSpecification>`
            **status**\: obsolete
        """

        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            self.parent = None
            self.file_name = None
            # Child containers are always instantiated and parented here.
            self.file_log_attributes = Syslog.Files.File.FileLogAttributes()
            self.file_log_attributes.parent = self
            self.file_log_discriminator = Syslog.Files.File.FileLogDiscriminator()
            self.file_log_discriminator.parent = self
            self.file_specification = Syslog.Files.File.FileSpecification()
            self.file_specification.parent = self


        class FileSpecification(object):
            """
            Specifications of the logging file destination

            .. attribute:: max_file_size
                Maximum file size (in KB)
                **type**\: int
                **range:** \-2147483648..2147483647
                **default value**\: 1024

            .. attribute:: path
                File path
                **type**\: str

            .. attribute:: severity
                Severity of messages
                **type**\: int
                **range:** \-2147483648..2147483647
                **default value**\: 6
            """

            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                self.max_file_size = None
                self.path = None
                self.severity = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:file-specification'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.max_file_size is not None:
                    return True
                if self.path is not None:
                    return True
                if self.severity is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.Files.File.FileSpecification']['meta_info']


        class FileLogAttributes(object):
            """
            Attributes of the logging file destination

            .. attribute:: max_file_size
                Maximum file size (in KB)
                **type**\: int
                **range:** \-2147483648..2147483647
                **default value**\: 1024

            .. attribute:: severity
                Severity of messages
                **type**\: int
                **range:** \-2147483648..2147483647
                **default value**\: 6
            """

            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                self.max_file_size = None
                self.severity = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:file-log-attributes'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.max_file_size is not None:
                    return True
                if self.severity is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.Files.File.FileLogAttributes']['meta_info']


        class FileLogDiscriminator(object):
            """
            Set File logging discriminators

            .. attribute:: match1
                Set file logging match discriminator 1
                **type**\: str

            .. attribute:: match2
                Set file logging match discriminator 2
                **type**\: str

            .. attribute:: match3
                Set file logging match discriminator 3
                **type**\: str

            .. attribute:: nomatch1
                Set file logging no match discriminator 1
                **type**\: str

            .. attribute:: nomatch2
                Set file logging no match discriminator 2
                **type**\: str

            .. attribute:: nomatch3
                Set file logging no match discriminator 3
                **type**\: str
            """

            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                self.match1 = None
                self.match2 = None
                self.match3 = None
                self.nomatch1 = None
                self.nomatch2 = None
                self.nomatch3 = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-syslog-cfg:file-log-discriminator'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # Any configured discriminator leaf makes this container non-empty.
                if not self.is_config():
                    return False
                if self.match1 is not None:
                    return True
                if self.match2 is not None:
                    return True
                if self.match3 is not None:
                    return True
                if self.nomatch1 is not None:
                    return True
                if self.nomatch2 is not None:
                    return True
                if self.nomatch3 is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.Files.File.FileLogDiscriminator']['meta_info']

        @property
        def _common_path(self):
            # Keyed list entry rooted at the model top; only the key is required.
            if self.file_name is None:
                raise YPYModelError('Key property file_name is None')
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:files/Cisco-IOS-XR-infra-syslog-cfg:file[Cisco-IOS-XR-infra-syslog-cfg:file-name = ' + str(self.file_name) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.file_name is not None:
                return True
            if self.file_log_attributes is not None and self.file_log_attributes._has_data():
                return True
            if self.file_log_discriminator is not None and self.file_log_discriminator._has_data():
                return True
            if self.file_specification is not None and self.file_specification._has_data():
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.Files.File']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:files'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.file is not None:
            for child_ref in self.file:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.Files']['meta_info']
class Ipv4(object):
"""
Syslog TOS bit for outgoing messages
.. attribute:: dscp
DSCP value
**type**\: :py:class:`Dscp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Ipv4.Dscp>`
**presence node**\: True
**status**\: obsolete
.. attribute:: precedence
Precedence value
**type**\: :py:class:`Precedence <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Ipv4.Precedence>`
**presence node**\: True
**status**\: obsolete
.. attribute:: tos
Type of service
**type**\: :py:class:`Tos <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Ipv4.Tos>`
"""
_prefix = 'infra-syslog-cfg'
_revision = '2016-06-22'
def __init__(self):
    self.parent = None
    # dscp and precedence are presence containers (marked obsolete in the
    # model); they stay None until explicitly created by the caller.
    self.dscp = None
    self.precedence = None
    # tos is a plain container and is always instantiated and parented.
    self.tos = Syslog.Ipv4.Tos()
    self.tos.parent = self
# Auto-generated YDK binding class: DSCP marking for outgoing syslog (presence node).
class Dscp(object):
    """
    DSCP value

    .. attribute:: type
        Logging TOS type DSCP
        **type**\: :py:class:`LoggingDscpEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.LoggingDscpEnum>`
        **mandatory**\: True

    .. attribute:: unused
        Unused
        **type**\: one of the below types:
        **type**\: :py:class:`LoggingPrecedenceValueEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.LoggingPrecedenceValueEnum>`
        ----
        **type**\: int
        **range:** 0..7
        ----

    .. attribute:: value
        Logging DSCP value
        **type**\: one of the below types:
        **type**\: :py:class:`LoggingDscpValueEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.LoggingDscpValueEnum>`
        **mandatory**\: True
        ----
        **type**\: int
        **range:** 0..63
        **mandatory**\: True
        ----

    .. attribute:: _is_presence
        Is present if this instance represents presence container else not
        **type**\: bool

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        # Presence container: its mere existence carries meaning.
        self._is_presence = True
        self.type = None
        self.unused = None
        self.value = None

    @property
    def _common_path(self):
        # Path is fixed from the model root; no parent required.
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:ipv4/Cisco-IOS-XR-infra-syslog-cfg:dscp'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        # Presence flag alone is enough to report data.
        if self._is_presence:
            return True
        if self.type is not None:
            return True
        if self.unused is not None:
            return True
        if self.value is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.Ipv4.Dscp']['meta_info']
# Auto-generated YDK binding class: type-of-service marking for outgoing syslog.
class Tos(object):
    """
    Type of service

    .. attribute:: dscp
        Logging DSCP value
        **type**\: one of the below types:
        **type**\: :py:class:`LoggingDscpValueEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.LoggingDscpValueEnum>`
        ----
        **type**\: int
        **range:** 0..63
        ----

    .. attribute:: precedence
        Logging precedence value
        **type**\: one of the below types:
        **type**\: :py:class:`LoggingPrecedenceValueEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.LoggingPrecedenceValueEnum>`
        ----
        **type**\: int
        **range:** 0..7
        ----

    .. attribute:: type
        Logging TOS type DSCP or precedence
        **type**\: :py:class:`LoggingTosEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.LoggingTosEnum>`
    """

    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        self.dscp = None
        self.precedence = None
        self.type = None

    @property
    def _common_path(self):
        # Path is fixed from the model root; no parent required.
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:ipv4/Cisco-IOS-XR-infra-syslog-cfg:tos'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.dscp is not None:
            return True
        if self.precedence is not None:
            return True
        if self.type is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.Ipv4.Tos']['meta_info']
class Precedence(object):
    """Precedence marking for outgoing IPv4 syslog messages.

    Presence container: being instantiated at all represents data.

    Leaves (all default to None = unset):
        type: TOS type, precedence (LoggingPrecedenceEnum, mandatory).
        unused: unused leaf (LoggingDscpValueEnum or int 0..63).
        value: precedence value (LoggingPrecedenceValueEnum or int 0..7,
            mandatory).
    """
    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        self._is_presence = True
        for leaf in ('type', 'unused', 'value'):
            setattr(self, leaf, None)

    @property
    def _common_path(self):
        # Fixed absolute schema path; no list keys involved.
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:ipv4/Cisco-IOS-XR-infra-syslog-cfg:precedence'

    def is_config(self):
        '''Tell whether this node holds configuration (True) or operational data (False).'''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        # Presence container: merely existing counts as data.
        if self._is_presence:
            return True
        return any(
            leaf is not None
            for leaf in (self.type, self.unused, self.value)
        )

    @staticmethod
    def _meta_info():
        # Deferred import keeps the generated meta module lazy.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
        return meta_module._meta_table['Syslog.Ipv4.Precedence']['meta_info']
@property
def _common_path(self):
    # Fixed absolute schema path of the ipv4 container.
    path = '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:ipv4'
    return path
def is_config(self):
    '''Tell whether this node holds configuration (True) or operational data (False).'''
    return True
def _has_data(self):
    # This container has data iff any child container reports data.
    if not self.is_config():
        return False
    for child in (self.dscp, self.precedence, self.tos):
        if child is not None and child._has_data():
            return True
    return False
@staticmethod
def _meta_info():
    # Deferred import keeps the generated meta module lazy.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
    return meta_module._meta_table['Syslog.Ipv4']['meta_info']
class Archive(object):
    """Syslog archive configuration.

    Leaves (all default to None = unset):
        device: archive device, '/disk0:', '/disk1:' or '/harddisk:' (str).
        file_size: max size of a single log file, MB 1..2047 (int).
        frequency: collection interval (LogCollectFrequencyEnum).
        length: max number of weeks of log to keep, 1..256 (int).
        severity: minimum severity to archive (LogMessageSeverityEnum).
        size: total archive size, 1..2047 (int).
        threshold: capacity percentage that triggers a syslog, 1..99 (int).
    """
    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        for leaf in ('device', 'file_size', 'frequency', 'length',
                     'severity', 'size', 'threshold'):
            setattr(self, leaf, None)

    @property
    def _common_path(self):
        # Fixed absolute schema path; no list keys involved.
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:archive'

    def is_config(self):
        '''Tell whether this node holds configuration (True) or operational data (False).'''
        return True

    def _has_data(self):
        # Data exists as soon as any leaf has been assigned.
        if not self.is_config():
            return False
        leaves = (self.device, self.file_size, self.frequency, self.length,
                  self.severity, self.size, self.threshold)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        # Deferred import keeps the generated meta module lazy.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
        return meta_module._meta_table['Syslog.Archive']['meta_info']
class Ipv6(object):
    """Traffic-class marking for outgoing IPv6 syslog messages.

    Children:
        dscp: obsolete presence container with the DSCP value (None until
            assigned by the caller).
        precedence: obsolete presence container with the precedence value
            (None until assigned by the caller).
        traffic_class: active container holding the traffic-class type,
            created eagerly.
    """
    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        # Presence containers stay None until explicitly created.
        self.dscp = None
        self.precedence = None
        self.traffic_class = Syslog.Ipv6.TrafficClass()
        self.traffic_class.parent = self

    class Dscp(object):
        """DSCP value for IPv6 syslog (obsolete presence container).

        Leaves: type (LoggingDscpEnum, mandatory), unused
        (LoggingPrecedenceValueEnum or int 0..7), value
        (LoggingDscpValueEnum or int 0..63, mandatory).
        """
        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            for leaf in ('type', 'unused', 'value'):
                setattr(self, leaf, None)

        @property
        def _common_path(self):
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:ipv6/Cisco-IOS-XR-infra-syslog-cfg:dscp'

        def is_config(self):
            '''Tell whether this node holds configuration (True) or operational data (False).'''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # Presence container: merely existing counts as data.
            if self._is_presence:
                return True
            return any(
                leaf is not None
                for leaf in (self.type, self.unused, self.value)
            )

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
            return meta_module._meta_table['Syslog.Ipv6.Dscp']['meta_info']

    class TrafficClass(object):
        """Traffic-class type for IPv6 syslog.

        Leaves: dscp (LoggingDscpValueEnum or int 0..63), precedence
        (LoggingPrecedenceValueEnum or int 0..7), type (LoggingTosEnum).
        """
        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            self.parent = None
            for leaf in ('dscp', 'precedence', 'type'):
                setattr(self, leaf, None)

        @property
        def _common_path(self):
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:ipv6/Cisco-IOS-XR-infra-syslog-cfg:traffic-class'

        def is_config(self):
            '''Tell whether this node holds configuration (True) or operational data (False).'''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(
                leaf is not None
                for leaf in (self.dscp, self.precedence, self.type)
            )

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
            return meta_module._meta_table['Syslog.Ipv6.TrafficClass']['meta_info']

    class Precedence(object):
        """Precedence value for IPv6 syslog (obsolete presence container).

        Leaves: type (LoggingPrecedenceEnum, mandatory), unused
        (LoggingDscpValueEnum or int 0..63), value
        (LoggingPrecedenceValueEnum or int 0..7, mandatory).
        """
        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            for leaf in ('type', 'unused', 'value'):
                setattr(self, leaf, None)

        @property
        def _common_path(self):
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:ipv6/Cisco-IOS-XR-infra-syslog-cfg:precedence'

        def is_config(self):
            '''Tell whether this node holds configuration (True) or operational data (False).'''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # Presence container: merely existing counts as data.
            if self._is_presence:
                return True
            return any(
                leaf is not None
                for leaf in (self.type, self.unused, self.value)
            )

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
            return meta_module._meta_table['Syslog.Ipv6.Precedence']['meta_info']

    @property
    def _common_path(self):
        # Fixed absolute schema path of the ipv6 container.
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:ipv6'

    def is_config(self):
        '''Tell whether this node holds configuration (True) or operational data (False).'''
        return True

    def _has_data(self):
        # This container has data iff any child container reports data.
        if not self.is_config():
            return False
        for child in (self.dscp, self.precedence, self.traffic_class):
            if child is not None and child._has_data():
                return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
        return meta_module._meta_table['Syslog.Ipv6']['meta_info']
class SourceInterfaceTable(object):
    """Source-interface configuration for syslog transactions.

    Children:
        source_interface_values: list container of per-interface entries,
            created eagerly.
    """
    _prefix = 'infra-syslog-cfg'
    _revision = '2016-06-22'

    def __init__(self):
        self.parent = None
        self.source_interface_values = Syslog.SourceInterfaceTable.SourceInterfaceValues()
        self.source_interface_values.parent = self

    class SourceInterfaceValues(object):
        """Holds the list of source-interface entries.

        Children:
            source_interface_value: YList of SourceInterfaceValue entries.
        """
        _prefix = 'infra-syslog-cfg'
        _revision = '2016-06-22'

        def __init__(self):
            self.parent = None
            entries = YList()
            entries.parent = self
            entries.name = 'source_interface_value'
            self.source_interface_value = entries

        class SourceInterfaceValue(object):
            """One source interface, keyed by interface name.

            Leaves:
                src_interface_name_value: interface name, list key (str).
            Children:
                source_interface_vrfs: per-interface VRF list, created eagerly.
            """
            _prefix = 'infra-syslog-cfg'
            _revision = '2016-06-22'

            def __init__(self):
                self.parent = None
                self.src_interface_name_value = None
                self.source_interface_vrfs = Syslog.SourceInterfaceTable.SourceInterfaceValues.SourceInterfaceValue.SourceInterfaceVrfs()
                self.source_interface_vrfs.parent = self

            class SourceInterfaceVrfs(object):
                """Holds the VRF list for one source interface.

                Children:
                    source_interface_vrf: YList of SourceInterfaceVrf entries.
                """
                _prefix = 'infra-syslog-cfg'
                _revision = '2016-06-22'

                def __init__(self):
                    self.parent = None
                    vrfs = YList()
                    vrfs.parent = self
                    vrfs.name = 'source_interface_vrf'
                    self.source_interface_vrf = vrfs

                class SourceInterfaceVrf(object):
                    """One VRF entry, keyed by VRF name.

                    Leaves:
                        vrf_name: name of the VRF instance, list key (str).
                    """
                    _prefix = 'infra-syslog-cfg'
                    _revision = '2016-06-22'

                    def __init__(self):
                        self.parent = None
                        self.vrf_name = None

                    @property
                    def _common_path(self):
                        # Path is derived from the parent node and the list key.
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        if self.vrf_name is None:
                            raise YPYModelError('Key property vrf_name is None')
                        return '%s/Cisco-IOS-XR-infra-syslog-cfg:source-interface-vrf[Cisco-IOS-XR-infra-syslog-cfg:vrf-name = %s]' % (self.parent._common_path, self.vrf_name)

                    def is_config(self):
                        '''Tell whether this node holds configuration (True) or operational data (False).'''
                        return True

                    def _has_data(self):
                        if not self.is_config():
                            return False
                        return self.vrf_name is not None

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
                        return meta_module._meta_table['Syslog.SourceInterfaceTable.SourceInterfaceValues.SourceInterfaceValue.SourceInterfaceVrfs.SourceInterfaceVrf']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return '%s/Cisco-IOS-XR-infra-syslog-cfg:source-interface-vrfs' % self.parent._common_path

                def is_config(self):
                    '''Tell whether this node holds configuration (True) or operational data (False).'''
                    return True

                def _has_data(self):
                    # Data exists iff any list entry has data.
                    if not self.is_config():
                        return False
                    return self.source_interface_vrf is not None and any(
                        entry._has_data() for entry in self.source_interface_vrf)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
                    return meta_module._meta_table['Syslog.SourceInterfaceTable.SourceInterfaceValues.SourceInterfaceValue.SourceInterfaceVrfs']['meta_info']

            @property
            def _common_path(self):
                # Absolute path; only the list key is required, not the parent.
                if self.src_interface_name_value is None:
                    raise YPYModelError('Key property src_interface_name_value is None')
                return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:source-interface-table/Cisco-IOS-XR-infra-syslog-cfg:source-interface-values/Cisco-IOS-XR-infra-syslog-cfg:source-interface-value[Cisco-IOS-XR-infra-syslog-cfg:src-interface-name-value = %s]' % self.src_interface_name_value

            def is_config(self):
                '''Tell whether this node holds configuration (True) or operational data (False).'''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.src_interface_name_value is not None:
                    return True
                vrfs = self.source_interface_vrfs
                return vrfs is not None and vrfs._has_data()

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
                return meta_module._meta_table['Syslog.SourceInterfaceTable.SourceInterfaceValues.SourceInterfaceValue']['meta_info']

        @property
        def _common_path(self):
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:source-interface-table/Cisco-IOS-XR-infra-syslog-cfg:source-interface-values'

        def is_config(self):
            '''Tell whether this node holds configuration (True) or operational data (False).'''
            return True

        def _has_data(self):
            # Data exists iff any list entry has data.
            if not self.is_config():
                return False
            return self.source_interface_value is not None and any(
                entry._has_data() for entry in self.source_interface_value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
            return meta_module._meta_table['Syslog.SourceInterfaceTable.SourceInterfaceValues']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-syslog-cfg:source-interface-table'

    def is_config(self):
        '''Tell whether this node holds configuration (True) or operational data (False).'''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        values = self.source_interface_values
        return values is not None and values._has_data()

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
        return meta_module._meta_table['Syslog.SourceInterfaceTable']['meta_info']
class AlarmLogger(object):
    """Alarm logger properties.

    Leaves (all default to None = unset):
        buffer_size: local event buffer size, 1024..1024000 (int).
        severity_level: log events at or above this severity
            (AlarmLoggerSeverityLevelEnum).
        source_location: include alarm source location in message text
            (Empty leaf).
        threshold: capacity alarm threshold percentage, 10..100,
            model default 90 (int).
    """
    _prefix = 'infra-alarm-logger-cfg'
    _revision = '2015-07-30'

    def __init__(self):
        self.parent = None
        for leaf in ('buffer_size', 'severity_level',
                     'source_location', 'threshold'):
            setattr(self, leaf, None)

    @property
    def _common_path(self):
        # Augments the syslog container from the alarm-logger model.
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-alarm-logger-cfg:alarm-logger'

    def is_config(self):
        '''Tell whether this node holds configuration (True) or operational data (False).'''
        return True

    def _has_data(self):
        # Data exists as soon as any leaf has been assigned.
        if not self.is_config():
            return False
        leaves = (self.buffer_size, self.severity_level,
                  self.source_location, self.threshold)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        # Deferred import keeps the generated meta module lazy.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
        return meta_module._meta_table['Syslog.AlarmLogger']['meta_info']
class Correlator(object):
"""
Configure properties of the event correlator
.. attribute:: buffer_size
Configure size of the correlator buffer
**type**\: int
**range:** 1024..52428800
.. attribute:: rule_sets
Table of configured rulesets
**type**\: :py:class:`RuleSets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.RuleSets>`
.. attribute:: rules
Table of configured rules
**type**\: :py:class:`Rules <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules>`
"""
_prefix = 'infra-correlator-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    self.buffer_size = None
    # Child containers are created eagerly and re-parented to this node.
    self.rule_sets = Syslog.Correlator.RuleSets()
    self.rules = Syslog.Correlator.Rules()
    for child in (self.rule_sets, self.rules):
        child.parent = self
class Rules(object):
"""
Table of configured rules
.. attribute:: rule
Rule name
**type**\: list of :py:class:`Rule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule>`
"""
_prefix = 'infra-correlator-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    # YList of Rule entries, keyed by rule name.
    rules = YList()
    rules.parent = self
    rules.name = 'rule'
    self.rule = rules
class Rule(object):
"""
Rule name
.. attribute:: name <key>
Rule name
**type**\: str
**length:** 0..32
.. attribute:: applied_to
Applied to the Rule or Ruleset
**type**\: :py:class:`AppliedTo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.AppliedTo>`
.. attribute:: apply_to
Apply the Rules
**type**\: :py:class:`ApplyTo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.ApplyTo>`
.. attribute:: definition
Configure a specified correlation rule
**type**\: :py:class:`Definition <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.Definition>`
.. attribute:: non_stateful
The Non\-Stateful Rule Type
**type**\: :py:class:`NonStateful <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.NonStateful>`
.. attribute:: stateful
The Stateful Rule Type
**type**\: :py:class:`Stateful <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.Stateful>`
"""
_prefix = 'infra-correlator-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    self.name = None
    # Child containers are created eagerly and re-parented to this node.
    self.applied_to = Syslog.Correlator.Rules.Rule.AppliedTo()
    self.apply_to = Syslog.Correlator.Rules.Rule.ApplyTo()
    self.definition = Syslog.Correlator.Rules.Rule.Definition()
    self.non_stateful = Syslog.Correlator.Rules.Rule.NonStateful()
    self.stateful = Syslog.Correlator.Rules.Rule.Stateful()
    for child in (self.applied_to, self.apply_to, self.definition,
                  self.non_stateful, self.stateful):
        child.parent = self
class Definition(object):
    """Definition of a correlation rule.

    A rule names one root message and up to nine correlated messages;
    each message is identified by a (category, group, code) triple held
    in numbered leaves.

    Leaves (all str, default None = unset):
        category_name_entry1..10: message category names; entry 1 is the
            root message, entries 2..10 the correlated messages.
        group_name_entry1..10: message group names, same numbering.
        message_code_entry1..10: message codes, same numbering.
        timeout: time the rule stays active, 1..7200000 ms (int).
    """
    _prefix = 'infra-correlator-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Suffixes listed in the string-sorted order the generator uses.
        for stem in ('category_name_entry', 'group_name_entry',
                     'message_code_entry'):
            for suffix in ('1', '10', '2', '3', '4',
                           '5', '6', '7', '8', '9'):
                setattr(self, stem + suffix, None)
        self.timeout = None

    @property
    def _common_path(self):
        # Path is derived from the parent rule; a detached node has no path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-infra-correlator-cfg:definition' % self.parent._common_path

    def is_config(self):
        '''Tell whether this node holds configuration (True) or operational data (False).'''
        return True

    def _has_data(self):
        # Data exists as soon as any leaf has been assigned.
        if not self.is_config():
            return False
        if self.timeout is not None:
            return True
        for stem in ('category_name_entry', 'group_name_entry',
                     'message_code_entry'):
            for suffix in ('1', '10', '2', '3', '4',
                           '5', '6', '7', '8', '9'):
                if getattr(self, stem + suffix) is not None:
                    return True
        return False

    @staticmethod
    def _meta_info():
        # Deferred import keeps the generated meta module lazy.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_module
        return meta_module._meta_table['Syslog.Correlator.Rules.Rule.Definition']['meta_info']
class NonStateful(object):
"""
The Non\-Stateful Rule Type
.. attribute:: context_correlation
Enable correlation on alarm context
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: non_root_causes
Table of configured non\-rootcause
**type**\: :py:class:`NonRootCauses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.NonStateful.NonRootCauses>`
.. attribute:: root_cause
The root cause
**type**\: :py:class:`RootCause <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.NonStateful.RootCause>`
.. attribute:: timeout
Timeout (time to wait for active correlation) in milliseconds
**type**\: int
**range:** 1..7200000
**units**\: millisecond
.. attribute:: timeout_root_cause
Rootcause Timeout (time to wait for rootcause) in milliseconds
**type**\: int
**range:** 1..7200000
**units**\: millisecond
"""
_prefix = 'infra-correlator-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    self.context_correlation = None
    # Child containers are created eagerly and re-parented to this node.
    self.non_root_causes = Syslog.Correlator.Rules.Rule.NonStateful.NonRootCauses()
    self.root_cause = Syslog.Correlator.Rules.Rule.NonStateful.RootCause()
    for child in (self.non_root_causes, self.root_cause):
        child.parent = self
    self.timeout = None
    self.timeout_root_cause = None
class NonRootCauses(object):
"""
Table of configured non\-rootcause
.. attribute:: non_root_cause
A non\-rootcause
**type**\: list of :py:class:`NonRootCause <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.NonStateful.NonRootCauses.NonRootCause>`
"""
_prefix = 'infra-correlator-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.non_root_cause = YList()
self.non_root_cause.parent = self
self.non_root_cause.name = 'non_root_cause'
class NonRootCause(object):
"""
A non\-rootcause
.. attribute:: category <key>
Correlated message category
**type**\: str
.. attribute:: group <key>
Correlated message group
**type**\: str
.. attribute:: message_code <key>
Correlated message code
**type**\: str
"""
_prefix = 'infra-correlator-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.category = None
self.group = None
self.message_code = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.category is None:
raise YPYModelError('Key property category is None')
if self.group is None:
raise YPYModelError('Key property group is None')
if self.message_code is None:
raise YPYModelError('Key property message_code is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:non-root-cause[Cisco-IOS-XR-infra-correlator-cfg:category = ' + str(self.category) + '][Cisco-IOS-XR-infra-correlator-cfg:group = ' + str(self.group) + '][Cisco-IOS-XR-infra-correlator-cfg:message-code = ' + str(self.message_code) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.category is not None:
return True
if self.group is not None:
return True
if self.message_code is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
return meta._meta_table['Syslog.Correlator.Rules.Rule.NonStateful.NonRootCauses.NonRootCause']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:non-root-causes'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.non_root_cause is not None:
for child_ref in self.non_root_cause:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Lazy import keeps module load cheap; the meta table is keyed by the
    # fully-qualified generated class path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_mod
    return meta_mod._meta_table['Syslog.Correlator.Rules.Rule.NonStateful.NonRootCauses']['meta_info']
class RootCause(object):
    """
    The root cause message.

    .. attribute:: category
        Root message category
        **type**\: str
    .. attribute:: group
        Root message group
        **type**\: str
    .. attribute:: message_code
        Root message code
        **type**\: str
    """

    _prefix = 'infra-correlator-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.category = None
        self.group = None
        self.message_code = None

    @property
    def _common_path(self):
        # Presentation container: no list keys, just the parent path.
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-infra-correlator-cfg:root-cause'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        return any(leaf is not None
                   for leaf in (self.category, self.group, self.message_code))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_mod
        return meta_mod._meta_table['Syslog.Correlator.Rules.Rule.NonStateful.RootCause']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:non-stateful'
def is_config(self):
    """Return True: this node models configuration, not operational, data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.context_correlation is not None:
return True
if self.non_root_causes is not None and self.non_root_causes._has_data():
return True
if self.root_cause is not None and self.root_cause._has_data():
return True
if self.timeout is not None:
return True
if self.timeout_root_cause is not None:
return True
return False
@staticmethod
def _meta_info():
    # Lazy import; keyed by the fully-qualified generated class path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_mod
    return meta_mod._meta_table['Syslog.Correlator.Rules.Rule.NonStateful']['meta_info']
class Stateful(object):
    """
    The Stateful Rule Type
    .. attribute:: context_correlation
        Enable correlation on alarm context
        **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: non_root_causes
        Table of configured non\-rootcause
        **type**\: :py:class:`NonRootCauses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.Stateful.NonRootCauses>`
    .. attribute:: reissue
        Enable reissue of non\-bistate alarms on rootcause alarm clear
        **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: reparent
        Enable reparent of alarm on rootcause alarm clear
        **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: root_cause
        The root cause
        **type**\: :py:class:`RootCause <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.Stateful.RootCause>`
    .. attribute:: timeout
        Timeout (time to wait for active correlation) in milliseconds
        **type**\: int
        **range:** 1..7200000
        **units**\: millisecond
    .. attribute:: timeout_root_cause
        Rootcause Timeout (time to wait for rootcause) in milliseconds
        **type**\: int
        **range:** 1..7200000
        **units**\: millisecond
    """

    _prefix = 'infra-correlator-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.context_correlation = None
        # Child containers are created eagerly and back-linked so they can
        # derive their XPath from this node.
        self.non_root_causes = Syslog.Correlator.Rules.Rule.Stateful.NonRootCauses()
        self.non_root_causes.parent = self
        self.reissue = None
        self.reparent = None
        self.root_cause = Syslog.Correlator.Rules.Rule.Stateful.RootCause()
        self.root_cause.parent = self
        self.timeout = None
        self.timeout_root_cause = None

    class NonRootCauses(object):
        """
        Table of configured non\-rootcause
        .. attribute:: non_root_cause
            A non\-rootcause
            **type**\: list of :py:class:`NonRootCause <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.Stateful.NonRootCauses.NonRootCause>`
        """

        _prefix = 'infra-correlator-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            # YList of NonRootCause entries, back-linked for path derivation.
            self.non_root_cause = YList()
            self.non_root_cause.parent = self
            self.non_root_cause.name = 'non_root_cause'

        class NonRootCause(object):
            """
            A non\-rootcause
            .. attribute:: category <key>
                Correlated message category
                **type**\: str
            .. attribute:: group <key>
                Correlated message group
                **type**\: str
            .. attribute:: message_code <key>
                Correlated message code
                **type**\: str
            """

            _prefix = 'infra-correlator-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.category = None
                self.group = None
                self.message_code = None

            @property
            def _common_path(self):
                # All three list keys must be set before a path can be built.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                if self.category is None:
                    raise YPYModelError('Key property category is None')
                if self.group is None:
                    raise YPYModelError('Key property group is None')
                if self.message_code is None:
                    raise YPYModelError('Key property message_code is None')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:non-root-cause[Cisco-IOS-XR-infra-correlator-cfg:category = ' + str(self.category) + '][Cisco-IOS-XR-infra-correlator-cfg:group = ' + str(self.group) + '][Cisco-IOS-XR-infra-correlator-cfg:message-code = ' + str(self.message_code) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # Any assigned leaf means the entry carries data.
                if not self.is_config():
                    return False
                if self.category is not None:
                    return True
                if self.group is not None:
                    return True
                if self.message_code is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.Correlator.Rules.Rule.Stateful.NonRootCauses.NonRootCause']['meta_info']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:non-root-causes'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # The container has data when any child list entry does.
            if not self.is_config():
                return False
            if self.non_root_cause is not None:
                for child_ref in self.non_root_cause:
                    if child_ref._has_data():
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.Correlator.Rules.Rule.Stateful.NonRootCauses']['meta_info']

    class RootCause(object):
        """
        The root cause
        .. attribute:: category
            Root message category
            **type**\: str
        .. attribute:: group
            Root message group
            **type**\: str
        .. attribute:: message_code
            Root message code
            **type**\: str
        """

        _prefix = 'infra-correlator-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.category = None
            self.group = None
            self.message_code = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:root-cause'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.category is not None:
                return True
            if self.group is not None:
                return True
            if self.message_code is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.Correlator.Rules.Rule.Stateful.RootCause']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:stateful'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when any leaf is set or any child container carries data.
        if not self.is_config():
            return False
        if self.context_correlation is not None:
            return True
        if self.non_root_causes is not None and self.non_root_causes._has_data():
            return True
        if self.reissue is not None:
            return True
        if self.reparent is not None:
            return True
        if self.root_cause is not None and self.root_cause._has_data():
            return True
        if self.timeout is not None:
            return True
        if self.timeout_root_cause is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.Correlator.Rules.Rule.Stateful']['meta_info']
class ApplyTo(object):
    """
    Apply the Rules
    .. attribute:: all_of_router
        Apply the rule to all of the router
        **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: contexts
        Apply rule to a specified list of contexts, e.g. interfaces
        **type**\: :py:class:`Contexts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.ApplyTo.Contexts>`
    .. attribute:: locations
        Apply rule to a specified list of Locations
        **type**\: :py:class:`Locations <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.ApplyTo.Locations>`
    """

    _prefix = 'infra-correlator-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.all_of_router = None
        # Child containers created eagerly and back-linked for path derivation.
        self.contexts = Syslog.Correlator.Rules.Rule.ApplyTo.Contexts()
        self.contexts.parent = self
        self.locations = Syslog.Correlator.Rules.Rule.ApplyTo.Locations()
        self.locations.parent = self

    class Contexts(object):
        """
        Apply rule to a specified list of contexts,
        e.g. interfaces
        .. attribute:: context
            One or more context names
            **type**\: list of str
        """

        _prefix = 'infra-correlator-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            # Leaf-list of plain strings (YLeafList, not YList of objects).
            self.context = YLeafList()
            self.context.parent = self
            self.context.name = 'context'

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:contexts'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # Leaf-list entries are plain values: any non-None entry counts.
            if not self.is_config():
                return False
            if self.context is not None:
                for child in self.context:
                    if child is not None:
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.Correlator.Rules.Rule.ApplyTo.Contexts']['meta_info']

    class Locations(object):
        """
        Apply rule to a specified list of Locations
        .. attribute:: location
            One or more Locations
            **type**\: list of str
            **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
        """

        _prefix = 'infra-correlator-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            # Leaf-list of location strings.
            self.location = YLeafList()
            self.location.parent = self
            self.location.name = 'location'

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:locations'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.location is not None:
                for child in self.location:
                    if child is not None:
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.Correlator.Rules.Rule.ApplyTo.Locations']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:apply-to'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.all_of_router is not None:
            return True
        if self.contexts is not None and self.contexts._has_data():
            return True
        if self.locations is not None and self.locations._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.Correlator.Rules.Rule.ApplyTo']['meta_info']
class AppliedTo(object):
    """
    Applied to the Rule or Ruleset
    .. attribute:: all
        Apply to all of the router
        **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: contexts
        Table of configured contexts to apply
        **type**\: :py:class:`Contexts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.AppliedTo.Contexts>`
    .. attribute:: locations
        Table of configured locations to apply
        **type**\: :py:class:`Locations <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.AppliedTo.Locations>`
    """

    _prefix = 'infra-correlator-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.all = None
        # Child containers created eagerly and back-linked for path derivation.
        self.contexts = Syslog.Correlator.Rules.Rule.AppliedTo.Contexts()
        self.contexts.parent = self
        self.locations = Syslog.Correlator.Rules.Rule.AppliedTo.Locations()
        self.locations.parent = self

    class Contexts(object):
        """
        Table of configured contexts to apply
        .. attribute:: context
            A context
            **type**\: list of :py:class:`Context <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.AppliedTo.Contexts.Context>`
        """

        _prefix = 'infra-correlator-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            # Keyed list of Context objects (unlike ApplyTo, which uses a leaf-list).
            self.context = YList()
            self.context.parent = self
            self.context.name = 'context'

        class Context(object):
            """
            A context
            .. attribute:: context <key>
                Context
                **type**\: str
                **length:** 0..32
            """

            _prefix = 'infra-correlator-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.context = None

            @property
            def _common_path(self):
                # The list key must be set before a path can be built.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                if self.context is None:
                    raise YPYModelError('Key property context is None')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:context[Cisco-IOS-XR-infra-correlator-cfg:context = ' + str(self.context) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.context is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.Correlator.Rules.Rule.AppliedTo.Contexts.Context']['meta_info']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:contexts'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # The container has data when any child list entry does.
            if not self.is_config():
                return False
            if self.context is not None:
                for child_ref in self.context:
                    if child_ref._has_data():
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.Correlator.Rules.Rule.AppliedTo.Contexts']['meta_info']

    class Locations(object):
        """
        Table of configured locations to apply
        .. attribute:: location
            A location
            **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.Rules.Rule.AppliedTo.Locations.Location>`
        """

        _prefix = 'infra-correlator-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.location = YList()
            self.location.parent = self
            self.location.name = 'location'

        class Location(object):
            """
            A location
            .. attribute:: location <key>
                Location
                **type**\: str
                **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
            """

            _prefix = 'infra-correlator-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.location = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                if self.location is None:
                    raise YPYModelError('Key property location is None')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:location[Cisco-IOS-XR-infra-correlator-cfg:location = ' + str(self.location) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.location is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.Correlator.Rules.Rule.AppliedTo.Locations.Location']['meta_info']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:locations'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.location is not None:
                for child_ref in self.location:
                    if child_ref._has_data():
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.Correlator.Rules.Rule.AppliedTo.Locations']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:applied-to'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.all is not None:
            return True
        if self.contexts is not None and self.contexts._has_data():
            return True
        if self.locations is not None and self.locations._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.Correlator.Rules.Rule.AppliedTo']['meta_info']
@property
def _common_path(self):
if self.name is None:
raise YPYModelError('Key property name is None')
return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-correlator-cfg:correlator/Cisco-IOS-XR-infra-correlator-cfg:rules/Cisco-IOS-XR-infra-correlator-cfg:rule[Cisco-IOS-XR-infra-correlator-cfg:name = ' + str(self.name) + ']'
def is_config(self):
    """Return True: this node models configuration, not operational, data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.name is not None:
return True
if self.applied_to is not None and self.applied_to._has_data():
return True
if self.apply_to is not None and self.apply_to._has_data():
return True
if self.definition is not None and self.definition._has_data():
return True
if self.non_stateful is not None and self.non_stateful._has_data():
return True
if self.stateful is not None and self.stateful._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Lazy import; keyed by the fully-qualified generated class path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_mod
    return meta_mod._meta_table['Syslog.Correlator.Rules.Rule']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-correlator-cfg:correlator/Cisco-IOS-XR-infra-correlator-cfg:rules'
def is_config(self):
    """Return True: this node models configuration, not operational, data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.rule is not None:
for child_ref in self.rule:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Lazy import; keyed by the fully-qualified generated class path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_mod
    return meta_mod._meta_table['Syslog.Correlator.Rules']['meta_info']
class RuleSets(object):
    """
    Table of configured rulesets
    .. attribute:: rule_set
        Ruleset name
        **type**\: list of :py:class:`RuleSet <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.RuleSets.RuleSet>`
    """

    _prefix = 'infra-correlator-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YList of RuleSet entries, back-linked for path derivation.
        self.rule_set = YList()
        self.rule_set.parent = self
        self.rule_set.name = 'rule_set'

    class RuleSet(object):
        """
        Ruleset name
        .. attribute:: name <key>
            Ruleset name
            **type**\: str
            **length:** 0..32
        .. attribute:: applied_to
            Applied to the Rule or Ruleset
            **type**\: :py:class:`AppliedTo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.RuleSets.RuleSet.AppliedTo>`
        .. attribute:: rulenames
            Table of configured rulenames
            **type**\: :py:class:`Rulenames <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.RuleSets.RuleSet.Rulenames>`
        """

        _prefix = 'infra-correlator-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.name = None
            # Child containers created eagerly and back-linked.
            self.applied_to = Syslog.Correlator.RuleSets.RuleSet.AppliedTo()
            self.applied_to.parent = self
            self.rulenames = Syslog.Correlator.RuleSets.RuleSet.Rulenames()
            self.rulenames.parent = self

        class Rulenames(object):
            """
            Table of configured rulenames
            .. attribute:: rulename
                A rulename
                **type**\: list of :py:class:`Rulename <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.RuleSets.RuleSet.Rulenames.Rulename>`
            """

            _prefix = 'infra-correlator-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.rulename = YList()
                self.rulename.parent = self
                self.rulename.name = 'rulename'

            class Rulename(object):
                """
                A rulename
                .. attribute:: rulename <key>
                    Rule name
                    **type**\: str
                    **length:** 0..32
                """

                _prefix = 'infra-correlator-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.rulename = None

                @property
                def _common_path(self):
                    # The list key must be set before a path can be built.
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    if self.rulename is None:
                        raise YPYModelError('Key property rulename is None')
                    return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:rulename[Cisco-IOS-XR-infra-correlator-cfg:rulename = ' + str(self.rulename) + ']'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    if self.rulename is not None:
                        return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                    return meta._meta_table['Syslog.Correlator.RuleSets.RuleSet.Rulenames.Rulename']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:rulenames'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # The container has data when any child list entry does.
                if not self.is_config():
                    return False
                if self.rulename is not None:
                    for child_ref in self.rulename:
                        if child_ref._has_data():
                            return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.Correlator.RuleSets.RuleSet.Rulenames']['meta_info']

        class AppliedTo(object):
            """
            Applied to the Rule or Ruleset
            .. attribute:: all
                Apply to all of the router
                **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: contexts
                Table of configured contexts to apply
                **type**\: :py:class:`Contexts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.RuleSets.RuleSet.AppliedTo.Contexts>`
            .. attribute:: locations
                Table of configured locations to apply
                **type**\: :py:class:`Locations <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.RuleSets.RuleSet.AppliedTo.Locations>`
            """

            _prefix = 'infra-correlator-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.all = None
                self.contexts = Syslog.Correlator.RuleSets.RuleSet.AppliedTo.Contexts()
                self.contexts.parent = self
                self.locations = Syslog.Correlator.RuleSets.RuleSet.AppliedTo.Locations()
                self.locations.parent = self

            class Contexts(object):
                """
                Table of configured contexts to apply
                .. attribute:: context
                    A context
                    **type**\: list of :py:class:`Context <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.RuleSets.RuleSet.AppliedTo.Contexts.Context>`
                """

                _prefix = 'infra-correlator-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.context = YList()
                    self.context.parent = self
                    self.context.name = 'context'

                class Context(object):
                    """
                    A context
                    .. attribute:: context <key>
                        Context
                        **type**\: str
                        **length:** 0..32
                    """

                    _prefix = 'infra-correlator-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        self.parent = None
                        self.context = None

                    @property
                    def _common_path(self):
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        if self.context is None:
                            raise YPYModelError('Key property context is None')
                        return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:context[Cisco-IOS-XR-infra-correlator-cfg:context = ' + str(self.context) + ']'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        if not self.is_config():
                            return False
                        if self.context is not None:
                            return True
                        return False

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                        return meta._meta_table['Syslog.Correlator.RuleSets.RuleSet.AppliedTo.Contexts.Context']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:contexts'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    if self.context is not None:
                        for child_ref in self.context:
                            if child_ref._has_data():
                                return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                    return meta._meta_table['Syslog.Correlator.RuleSets.RuleSet.AppliedTo.Contexts']['meta_info']

            class Locations(object):
                """
                Table of configured locations to apply
                .. attribute:: location
                    A location
                    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Correlator.RuleSets.RuleSet.AppliedTo.Locations.Location>`
                """

                _prefix = 'infra-correlator-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.location = YList()
                    self.location.parent = self
                    self.location.name = 'location'

                class Location(object):
                    """
                    A location
                    .. attribute:: location <key>
                        Location
                        **type**\: str
                        **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
                    """

                    _prefix = 'infra-correlator-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        self.parent = None
                        self.location = None

                    @property
                    def _common_path(self):
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        if self.location is None:
                            raise YPYModelError('Key property location is None')
                        return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:location[Cisco-IOS-XR-infra-correlator-cfg:location = ' + str(self.location) + ']'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        if not self.is_config():
                            return False
                        if self.location is not None:
                            return True
                        return False

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                        return meta._meta_table['Syslog.Correlator.RuleSets.RuleSet.AppliedTo.Locations.Location']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:locations'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    if self.location is not None:
                        for child_ref in self.location:
                            if child_ref._has_data():
                                return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                    return meta._meta_table['Syslog.Correlator.RuleSets.RuleSet.AppliedTo.Locations']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:applied-to'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.all is not None:
                    return True
                if self.contexts is not None and self.contexts._has_data():
                    return True
                if self.locations is not None and self.locations._has_data():
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.Correlator.RuleSets.RuleSet.AppliedTo']['meta_info']

        @property
        def _common_path(self):
            # Absolute path: rule-set is a top-level list keyed by name.
            if self.name is None:
                raise YPYModelError('Key property name is None')
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-correlator-cfg:correlator/Cisco-IOS-XR-infra-correlator-cfg:rule-sets/Cisco-IOS-XR-infra-correlator-cfg:rule-set[Cisco-IOS-XR-infra-correlator-cfg:name = ' + str(self.name) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.name is not None:
                return True
            if self.applied_to is not None and self.applied_to._has_data():
                return True
            if self.rulenames is not None and self.rulenames._has_data():
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.Correlator.RuleSets.RuleSet']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-correlator-cfg:correlator/Cisco-IOS-XR-infra-correlator-cfg:rule-sets'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.rule_set is not None:
            for child_ref in self.rule_set:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.Correlator.RuleSets']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-correlator-cfg:correlator'
def is_config(self):
    """Return True: this node models configuration, not operational, data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.buffer_size is not None:
return True
if self.rule_sets is not None and self.rule_sets._has_data():
return True
if self.rules is not None and self.rules._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Lazy import; keyed by the fully-qualified generated class path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta_mod
    return meta_mod._meta_table['Syslog.Correlator']['meta_info']
class Suppression(object):
    """
    Configure properties of the syslog/alarm
    suppression

    .. attribute:: rules
        Table of configured rules
        **type**\: :py:class:`Rules <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Suppression.Rules>`
    """

    # YANG module prefix / revision this generated class was produced from.
    _prefix = 'infra-correlator-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.rules = Syslog.Suppression.Rules()
        self.rules.parent = self

    class Rules(object):
        """
        Table of configured rules

        .. attribute:: rule
            Rule name
            **type**\: list of :py:class:`Rule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Suppression.Rules.Rule>`
        """

        _prefix = 'infra-correlator-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            # YList of Rule entries, keyed by the rule name.
            self.rule = YList()
            self.rule.parent = self
            self.rule.name = 'rule'

        class Rule(object):
            """
            Rule name

            .. attribute:: name <key>
                Rule name
                **type**\: str
                **length:** 0..32

            .. attribute:: alarm_causes
                Causes of alarms to be suppressed
                **type**\: :py:class:`AlarmCauses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Suppression.Rules.Rule.AlarmCauses>`

            .. attribute:: all_alarms
                Suppress all alarms
                **type**\: :py:class:`Empty<ydk.types.Empty>`

            .. attribute:: applied_to
                Applied to the Rule
                **type**\: :py:class:`AppliedTo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Suppression.Rules.Rule.AppliedTo>`
            """

            _prefix = 'infra-correlator-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.name = None
                self.alarm_causes = Syslog.Suppression.Rules.Rule.AlarmCauses()
                self.alarm_causes.parent = self
                self.all_alarms = None
                self.applied_to = Syslog.Suppression.Rules.Rule.AppliedTo()
                self.applied_to.parent = self

            class AppliedTo(object):
                """
                Applied to the Rule

                .. attribute:: all
                    Apply to all of the router
                    **type**\: :py:class:`Empty<ydk.types.Empty>`

                .. attribute:: sources
                    Table of configured sources to apply
                    **type**\: :py:class:`Sources <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Suppression.Rules.Rule.AppliedTo.Sources>`
                """

                _prefix = 'infra-correlator-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.all = None
                    self.sources = Syslog.Suppression.Rules.Rule.AppliedTo.Sources()
                    self.sources.parent = self

                class Sources(object):
                    """
                    Table of configured sources to apply

                    .. attribute:: source
                        An alarm source
                        **type**\: list of :py:class:`Source <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Suppression.Rules.Rule.AppliedTo.Sources.Source>`
                    """

                    _prefix = 'infra-correlator-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        self.parent = None
                        self.source = YList()
                        self.source.parent = self
                        self.source.name = 'source'

                    class Source(object):
                        """
                        An alarm source

                        .. attribute:: source <key>
                            Source
                            **type**\: str
                            **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
                        """

                        _prefix = 'infra-correlator-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            self.source = None

                        @property
                        def _common_path(self):
                            # Path is relative to the parent container and
                            # includes the 'source' list key; both must be set.
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')
                            if self.source is None:
                                raise YPYModelError('Key property source is None')
                            return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:source[Cisco-IOS-XR-infra-correlator-cfg:source = ' + str(self.source) + ']'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.source is not None:
                                return True
                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                            return meta._meta_table['Syslog.Suppression.Rules.Rule.AppliedTo.Sources.Source']['meta_info']

                    @property
                    def _common_path(self):
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:sources'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        if not self.is_config():
                            return False
                        if self.source is not None:
                            for child_ref in self.source:
                                if child_ref._has_data():
                                    return True
                        return False

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                        return meta._meta_table['Syslog.Suppression.Rules.Rule.AppliedTo.Sources']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:applied-to'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    if self.all is not None:
                        return True
                    if self.sources is not None and self.sources._has_data():
                        return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                    return meta._meta_table['Syslog.Suppression.Rules.Rule.AppliedTo']['meta_info']

            class AlarmCauses(object):
                """
                Causes of alarms to be suppressed

                .. attribute:: alarm_cause
                    Category, Group and Code of alarm/syslog to be suppressed
                    **type**\: list of :py:class:`AlarmCause <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_syslog_cfg.Syslog.Suppression.Rules.Rule.AlarmCauses.AlarmCause>`
                """

                _prefix = 'infra-correlator-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.alarm_cause = YList()
                    self.alarm_cause.parent = self
                    self.alarm_cause.name = 'alarm_cause'

                class AlarmCause(object):
                    """
                    Category, Group and Code of alarm/syslog to
                    be suppressed

                    .. attribute:: category <key>
                        Category
                        **type**\: str
                        **length:** 0..32

                    .. attribute:: code <key>
                        Code
                        **type**\: str
                        **length:** 0..32

                    .. attribute:: group <key>
                        Group
                        **type**\: str
                        **length:** 0..32
                    """

                    _prefix = 'infra-correlator-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        self.parent = None
                        self.category = None
                        self.code = None
                        self.group = None

                    @property
                    def _common_path(self):
                        # All three list keys are required to build the path.
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        if self.category is None:
                            raise YPYModelError('Key property category is None')
                        if self.code is None:
                            raise YPYModelError('Key property code is None')
                        if self.group is None:
                            raise YPYModelError('Key property group is None')
                        return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:alarm-cause[Cisco-IOS-XR-infra-correlator-cfg:category = ' + str(self.category) + '][Cisco-IOS-XR-infra-correlator-cfg:code = ' + str(self.code) + '][Cisco-IOS-XR-infra-correlator-cfg:group = ' + str(self.group) + ']'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        if not self.is_config():
                            return False
                        if self.category is not None:
                            return True
                        if self.code is not None:
                            return True
                        if self.group is not None:
                            return True
                        return False

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                        return meta._meta_table['Syslog.Suppression.Rules.Rule.AlarmCauses.AlarmCause']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-infra-correlator-cfg:alarm-causes'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    if self.alarm_cause is not None:
                        for child_ref in self.alarm_cause:
                            if child_ref._has_data():
                                return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                    return meta._meta_table['Syslog.Suppression.Rules.Rule.AlarmCauses']['meta_info']

            @property
            def _common_path(self):
                # Rule lives under a fixed container path, so only the 'name'
                # list key needs to be populated.
                if self.name is None:
                    raise YPYModelError('Key property name is None')
                return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-correlator-cfg:suppression/Cisco-IOS-XR-infra-correlator-cfg:rules/Cisco-IOS-XR-infra-correlator-cfg:rule[Cisco-IOS-XR-infra-correlator-cfg:name = ' + str(self.name) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.name is not None:
                    return True
                if self.alarm_causes is not None and self.alarm_causes._has_data():
                    return True
                if self.all_alarms is not None:
                    return True
                if self.applied_to is not None and self.applied_to._has_data():
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
                return meta._meta_table['Syslog.Suppression.Rules.Rule']['meta_info']

        @property
        def _common_path(self):
            return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-correlator-cfg:suppression/Cisco-IOS-XR-infra-correlator-cfg:rules'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.rule is not None:
                for child_ref in self.rule:
                    if child_ref._has_data():
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
            return meta._meta_table['Syslog.Suppression.Rules']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-infra-syslog-cfg:syslog/Cisco-IOS-XR-infra-correlator-cfg:suppression'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.rules is not None and self.rules._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
        return meta._meta_table['Syslog.Suppression']['meta_info']
@property
def _common_path(self):
    # Root path of the syslog configuration model.
    return '/Cisco-IOS-XR-infra-syslog-cfg:syslog'
def is_config(self):
    """Identify this node as configuration data; always True."""
    return True
def _has_data(self):
    """Return True when any syslog leaf is set or any child container has data.

    The checks are pure attribute reads, so grouping leaves and containers
    separately yields the same result as the original per-attribute chain.
    """
    if not self.is_config():
        return False
    # Plain leaves: present as soon as they are assigned at all.
    leaves = (self.enable_console_logging, self.host_name_prefix,
              self.local_log_file_size, self.suppress_duplicates)
    if any(leaf is not None for leaf in leaves):
        return True
    # Child containers: present only when they themselves report data.
    containers = (self.alarm_logger, self.archive, self.buffered_logging,
                  self.console_logging, self.correlator, self.files,
                  self.history_logging, self.host_server, self.ipv4,
                  self.ipv6, self.logging_facilities, self.monitor_logging,
                  self.source_interface_table, self.suppression,
                  self.trap_logging)
    return any(c is not None and c._has_data() for c in containers)
@staticmethod
def _meta_info():
    # Look up the generated metadata entry for the top-level Syslog class.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_syslog_cfg as meta
    return meta._meta_table['Syslog']['meta_info']
| 36.017862
| 337
| 0.453374
| 23,154
| 260,121
| 4.889306
| 0.023624
| 0.048266
| 0.060332
| 0.05883
| 0.852112
| 0.826627
| 0.804252
| 0.776295
| 0.74809
| 0.716519
| 0
| 0.017625
| 0.474548
| 260,121
| 7,221
| 338
| 36.02285
| 0.810632
| 0.23554
| 0
| 0.771597
| 0
| 0.018979
| 0.117824
| 0.07534
| 0
| 0
| 0
| 0
| 0
| 1
| 0.160668
| false
| 0
| 0.036649
| 0.012435
| 0.516688
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
ae9906ab2bf8111addf6cc2080dade2abb98f85d
| 27,658
|
py
|
Python
|
tb_rest_client/api/api_ce/admin_controller_api.py
|
samson0v/python_tb_rest_client
|
08ff7898740f7cec2170e85d5c3c89e222e967f7
|
[
"Apache-2.0"
] | 30
|
2020-06-19T06:42:50.000Z
|
2021-08-23T21:16:36.000Z
|
tb_rest_client/api/api_ce/admin_controller_api.py
|
samson0v/python_tb_rest_client
|
08ff7898740f7cec2170e85d5c3c89e222e967f7
|
[
"Apache-2.0"
] | 25
|
2021-08-30T01:17:27.000Z
|
2022-03-16T14:10:14.000Z
|
tb_rest_client/api/api_ce/admin_controller_api.py
|
samson0v/python_tb_rest_client
|
08ff7898740f7cec2170e85d5c3c89e222e967f7
|
[
"Apache-2.0"
] | 23
|
2020-07-06T13:41:54.000Z
|
2021-08-23T21:04:50.000Z
|
# coding: utf-8
"""
ThingsBoard REST API
ThingsBoard open-source IoT platform REST API documentation. # noqa: E501
OpenAPI spec version: 3.3.3-SNAPSHOT
Contact: info@thingsboard.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class AdminControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Store the given ApiClient, constructing a default one when omitted."""
    self.api_client = ApiClient() if api_client is None else api_client
def check_updates_using_get(self, **kwargs):  # noqa: E501
    """Check for new Platform Releases (checkUpdates).

    Check notifications about new platform releases. Available for users
    with 'SYS_ADMIN' authority. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :return: UpdateMessage (or the request thread when async)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both simply forward to the *_with_http_info
    # variant and return its result.
    return self.check_updates_using_get_with_http_info(**kwargs)  # noqa: E501
def check_updates_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """Check for new Platform Releases (checkUpdates) # noqa: E501
    Check notifications about new platform releases. Available for users with 'SYS_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.check_updates_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: UpdateMessage
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() is captured before any new locals are created, so `params`
    # holds exactly the declared parameters; unknown kwargs are rejected.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method check_updates_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/admin/updates', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UpdateMessage',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_admin_settings_using_get(self, key, **kwargs):  # noqa: E501
    """Get the Administration Settings object using key (getAdminSettings).

    Referencing a non-existing key causes an error. Available for users
    with 'SYS_ADMIN' authority. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :param str key: A string value of the key (e.g. 'general' or 'mail'). (required)
    :return: AdminSettings (or the request thread when async)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both simply forward to the *_with_http_info
    # variant and return its result.
    return self.get_admin_settings_using_get_with_http_info(key, **kwargs)  # noqa: E501
def get_admin_settings_using_get_with_http_info(self, key, **kwargs):  # noqa: E501
    """Get the Administration Settings object using key (getAdminSettings) # noqa: E501
    Get the Administration Settings object using specified string key. Referencing non-existing key will cause an error. Available for users with 'SYS_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_admin_settings_using_get_with_http_info(key, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str key: A string value of the key (e.g. 'general' or 'mail'). (required)
    :return: AdminSettings
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['key']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() is captured before any new locals are created, so `params`
    # holds exactly the declared parameters; unknown kwargs are rejected.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        # NOTE: the loop variable shadows the `key` parameter, but
        # params['key'] was already captured by locals() above.
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_admin_settings_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'key' is set
    if ('key' not in params or
            params['key'] is None):
        raise ValueError("Missing the required parameter `key` when calling `get_admin_settings_using_get`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    if 'key' in params:
        path_params['key'] = params['key']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/admin/settings/{key}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AdminSettings',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_security_settings_using_get(self, **kwargs):  # noqa: E501
    """Get the Security Settings object.

    Returns the Security Settings object that contains password policy,
    etc. Available for users with 'SYS_ADMIN' authority. Synchronous by
    default; pass ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :return: SecuritySettings (or the request thread when async)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both simply forward to the *_with_http_info
    # variant and return its result.
    return self.get_security_settings_using_get_with_http_info(**kwargs)  # noqa: E501
def get_security_settings_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """Get the Security Settings object # noqa: E501
    Get the Security Settings object that contains password policy, etc. Available for users with 'SYS_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_security_settings_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: SecuritySettings
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() is captured before any new locals are created, so `params`
    # holds exactly the declared parameters; unknown kwargs are rejected.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_security_settings_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/admin/securitySettings', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SecuritySettings',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def save_admin_settings_using_post(self, **kwargs):  # noqa: E501
    """Create or update the Administration Settings (saveAdminSettings).

    (The generated summary said "getAdminSettings"; the body text below it
    describes create/update.) The platform generates a random Settings Id
    during creation; pass an existing Id to update. Available for users
    with 'SYS_ADMIN' authority. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :param AdminSettings body:
    :return: AdminSettings (or the request thread when async)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both simply forward to the *_with_http_info
    # variant and return its result.
    return self.save_admin_settings_using_post_with_http_info(**kwargs)  # noqa: E501
def save_admin_settings_using_post_with_http_info(self, **kwargs):  # noqa: E501
    """Creates or Updates the Administration Settings (saveAdminSettings) # noqa: E501
    Creates or Updates the Administration Settings. Platform generates random Administration Settings Id during settings creation. The Administration Settings Id will be present in the response. Specify the Administration Settings Id when you would like to update the Administration Settings. Referencing non-existing Administration Settings Id will cause an error. Available for users with 'SYS_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.save_admin_settings_using_post_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param AdminSettings body:
    :return: AdminSettings
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() is captured before any new locals are created, so `params`
    # holds exactly the declared parameters; unknown kwargs are rejected.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method save_admin_settings_using_post" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/admin/settings', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AdminSettings',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def save_security_settings_using_post(self, **kwargs):  # noqa: E501
    """Update Security Settings (saveSecuritySettings).

    Updates the Security Settings object that contains password policy,
    etc. Available for users with 'SYS_ADMIN' authority. Synchronous by
    default; pass ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :param SecuritySettings body:
    :return: SecuritySettings (or the request thread when async)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both simply forward to the *_with_http_info
    # variant and return its result.
    return self.save_security_settings_using_post_with_http_info(**kwargs)  # noqa: E501
def save_security_settings_using_post_with_http_info(self, **kwargs):  # noqa: E501
    """Update Security Settings (saveSecuritySettings) # noqa: E501
    Updates the Security Settings object that contains password policy, etc. Available for users with 'SYS_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.save_security_settings_using_post_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param SecuritySettings body:
    :return: SecuritySettings
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() is captured before any new locals are created, so `params`
    # holds exactly the declared parameters; unknown kwargs are rejected.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method save_security_settings_using_post" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/admin/securitySettings', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SecuritySettings',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def send_test_mail_using_post(self, **kwargs):  # noqa: E501
    """Send test email (sendTestMail).

    Attempts to send a test email to the System Administrator User using
    the Mail Settings passed as the body. Available for users with
    'SYS_ADMIN' authority. Synchronous by default; pass ``async_req=True``
    to receive the request thread instead.

    :param async_req bool
    :param AdminSettings body:
    :return: None (or the request thread when async)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both simply forward to the *_with_http_info
    # variant and return its result.
    return self.send_test_mail_using_post_with_http_info(**kwargs)  # noqa: E501
def send_test_mail_using_post_with_http_info(self, **kwargs):  # noqa: E501
    """Send test email (sendTestMail) # noqa: E501
    Attempts to send test email to the System Administrator User using Mail Settings provided as a parameter. You may change the 'To' email in the user profile of the System Administrator. Available for users with 'SYS_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.send_test_mail_using_post_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param AdminSettings body:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() is captured before any new locals are created, so `params`
    # holds exactly the declared parameters; unknown kwargs are rejected.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method send_test_mail_using_post" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/admin/settings/testMail', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def send_test_sms_using_post(self, **kwargs):  # noqa: E501
    """Send test sms (sendTestSms)  # noqa: E501

    Attempts to send test sms to the System Administrator User using SMS Settings and phone number provided as a parameters of the request. Available for users with 'SYS_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.send_test_sms_using_post(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param TestSmsRequest body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant does the real work; hide the extra
    # (status, headers) response metadata from plain callers.
    kwargs['_return_http_data_only'] = True
    # Both the async and sync paths returned the same call unchanged, so
    # the original if/else on async_req was redundant: async_req=True makes
    # the delegate return the request thread, otherwise it returns the data.
    return self.send_test_sms_using_post_with_http_info(**kwargs)  # noqa: E501
def send_test_sms_using_post_with_http_info(self, **kwargs):  # noqa: E501
    """Send test sms (sendTestMail)  # noqa: E501

    Attempts to send test sms to the System Administrator User using SMS Settings and phone number provided as a parameters of the request. Available for users with 'SYS_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.send_test_sms_using_post_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param TestSmsRequest body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: the operation parameter plus the
    # generic request-control options understood by call_api().
    all_params = ['body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    # Reject anything the operation does not know about.
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method send_test_sms_using_post" % key
            )
    params = dict(kwargs)

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}

    # Request/response content negotiation.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    # Optional request body (TestSmsRequest).
    body_params = params.get('body')

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/admin/settings/testSms', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 40.494876
| 432
| 0.627558
| 3,215
| 27,658
| 5.138414
| 0.069051
| 0.046005
| 0.023729
| 0.030508
| 0.955266
| 0.951211
| 0.946489
| 0.937833
| 0.932203
| 0.929056
| 0
| 0.014957
| 0.289283
| 27,658
| 682
| 433
| 40.554252
| 0.825457
| 0.38441
| 0
| 0.818942
| 0
| 0
| 0.160869
| 0.052534
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041783
| false
| 0
| 0.011142
| 0
| 0.114206
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ae9e4d8b3f029999e78390dcc24fa7c4137dc45a
| 2,829
|
py
|
Python
|
database/nfl_data/migrations/0002_auto_20171217_1259.py
|
luispereda555/nfl-player-stats
|
e74c3817f7d10746546cdb9ce763bbd850badd89
|
[
"MIT"
] | 74
|
2017-12-09T14:32:21.000Z
|
2022-01-26T20:22:09.000Z
|
database/nfl_data/migrations/0002_auto_20171217_1259.py
|
luispereda555/nfl-player-stats
|
e74c3817f7d10746546cdb9ce763bbd850badd89
|
[
"MIT"
] | 14
|
2017-12-10T01:03:46.000Z
|
2020-12-02T16:12:39.000Z
|
database/nfl_data/migrations/0002_auto_20171217_1259.py
|
luispereda555/nfl-player-stats
|
e74c3817f7d10746546cdb9ce763bbd850badd89
|
[
"MIT"
] | 41
|
2017-12-08T22:31:13.000Z
|
2021-12-08T23:25:50.000Z
|
# Generated by Django 2.0 on 2017-12-17 12:59
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax the ``profile`` model: every listed column becomes nullable.

    All operations are ``AlterField`` on the same model, so they are
    generated from (column, field) pairs; the pair order matches the
    original auto-generated migration exactly.
    """

    dependencies = [
        ('nfl_data', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(model_name='profile', name=column, field=field)
        for column, field in [
            ('birth_date', models.DateField(null=True)),
            ('birth_place', models.CharField(max_length=50, null=True)),
            ('college', models.CharField(max_length=30, null=True)),
            ('current_salary', models.IntegerField(null=True)),
            ('current_team', models.CharField(max_length=3, null=True)),
            ('death_date', models.DateField(null=True)),
            ('draft_position', models.IntegerField(null=True)),
            ('draft_round', models.IntegerField(null=True)),
            ('draft_team', models.CharField(max_length=3, null=True)),
            ('draft_year', models.CharField(max_length=4, null=True)),
            ('height', models.CharField(max_length=4, null=True)),
            ('high_school', models.CharField(max_length=30, null=True)),
            ('hof_induction_year', models.DateField(null=True)),
            ('name', models.CharField(max_length=50, null=True)),
            ('position', models.CharField(max_length=2, null=True)),
            ('weight', models.IntegerField(null=True)),
        ]
    ]
| 30.095745
| 61
| 0.538353
| 258
| 2,829
| 5.751938
| 0.22093
| 0.215633
| 0.269542
| 0.312668
| 0.84434
| 0.803908
| 0.803908
| 0.770216
| 0.737871
| 0.721698
| 0
| 0.016721
| 0.344645
| 2,829
| 93
| 62
| 30.419355
| 0.783711
| 0.0152
| 0
| 0.724138
| 1
| 0
| 0.105603
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011494
| 0
| 0.045977
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
882720546ecd055e4bdee39e9ffbd53d399c3aec
| 2,009
|
py
|
Python
|
BreastCancer/controller.py
|
darius-luca-tech/AI_Projects
|
3cff26878807121e077375e5dbef39390fea0189
|
[
"MIT"
] | 2
|
2020-07-11T14:48:27.000Z
|
2020-08-04T11:24:58.000Z
|
BreastCancer/controller.py
|
darius-luca-tech/AI_Projects
|
3cff26878807121e077375e5dbef39390fea0189
|
[
"MIT"
] | null | null | null |
BreastCancer/controller.py
|
darius-luca-tech/AI_Projects
|
3cff26878807121e077375e5dbef39390fea0189
|
[
"MIT"
] | null | null | null |
from model import InputForm
from flask import Flask, render_template, request
from compute import compute
# Module-level WSGI application object used by the route decorators below.
app = Flask(__name__)
@app.route('/')
def index():
    """Render the site header page (no form handling on this route)."""
    # A disabled, commented-out copy of the Sdc form-handling code used to
    # live here; it was dead code and has been removed.
    return render_template('header.html')
@app.route('/Acasa/')
def home():
    """Render the home ("Acasa") page (no form handling on this route)."""
    # Dead commented-out form-handling code removed; see usingdata() for
    # the live version of that logic.
    return render_template('Acasa.html')
@app.route('/Sdc/', methods=['GET', 'POST'])
def usingdata():
    """Show the input form; on a valid POST, also show the computed result."""
    form = InputForm(request.form)
    result = None
    if request.method == 'POST' and form.validate():
        # compute() takes the nine form fields positionally, in this order.
        result = compute(form.a.data, form.b.data, form.c.data, form.d.data,
                         form.e.data, form.z.data, form.g.data, form.h.data,
                         form.i.data)
    return render_template('Sdc.html', form=form, result=result)
@app.route('/Mai_multe_informatii/')
def moreinfo():
    """Render the "more information" page (no form handling on this route)."""
    # Dead commented-out form-handling code removed.
    return render_template('Mai_multe_informatii.html')
@app.route('/Masuri_de_precautie/')
def preventivemeasures():
    """Render the "preventive measures" page (no form handling on this route)."""
    # Dead commented-out form-handling code removed.
    return render_template('Masuri_de_precautie.html')
if __name__ == '__main__':
    # Debug mode enables the reloader and the interactive traceback page.
    # NOTE(review): debug=True must not be used on a production deployment.
    app.run(debug=True)
| 35.875
| 137
| 0.660528
| 304
| 2,009
| 4.279605
| 0.177632
| 0.245965
| 0.076864
| 0.092237
| 0.714835
| 0.714835
| 0.714835
| 0.714835
| 0.714835
| 0.714835
| 0
| 0
| 0.167247
| 2,009
| 55
| 138
| 36.527273
| 0.777645
| 0.469388
| 0
| 0
| 0
| 0
| 0.146411
| 0.088038
| 0
| 0
| 0
| 0
| 0
| 1
| 0.192308
| false
| 0
| 0.115385
| 0.153846
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
ee2883c2276cb3d65fb0f84406a9bc672000c003
| 8,393
|
py
|
Python
|
api/tests/test_project_posting_allocation.py
|
matchd-ch/matchd-backend
|
84be4aab1b4708cae50a8988301b15df877c8db0
|
[
"Apache-2.0"
] | 1
|
2022-03-03T09:55:57.000Z
|
2022-03-03T09:55:57.000Z
|
api/tests/test_project_posting_allocation.py
|
matchd-ch/matchd-backend
|
84be4aab1b4708cae50a8988301b15df877c8db0
|
[
"Apache-2.0"
] | 7
|
2022-02-09T10:44:53.000Z
|
2022-03-28T03:29:43.000Z
|
api/tests/test_project_posting_allocation.py
|
matchd-ch/matchd-backend
|
84be4aab1b4708cae50a8988301b15df877c8db0
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from graphql_relay import from_global_id
from django.contrib.auth.models import AnonymousUser
from db.models import ProjectPostingState, ProjectPosting
# pylint: disable=R0913
@pytest.mark.django_db
def test_allocation_as_company(user_employee, company_project_posting_object, login,
                               project_posting_allocation):
    """Allocation driven by a company employee: company set, no student."""
    _test_allocation(
        user_employee,
        user_employee.employee,
        user_employee.company,
        None,
        company_project_posting_object,
        login,
        project_posting_allocation,
    )
@pytest.mark.django_db
def test_allocation_as_student(user_student, company_project_posting_object, login,
                               project_posting_allocation):
    """Allocation driven by a student: no employee/company, student set."""
    _test_allocation(
        user_student,
        None,
        None,
        user_student.student,
        company_project_posting_object,
        login,
        project_posting_allocation,
    )
def _test_allocation(user, employee, company, student, company_project_posting_object, login,
                     project_posting_allocation):
    """Drive a successful allocation and verify the persisted posting state.

    Prepares the posting as a step-3 draft owned by *company*/*student*,
    publishes it via the allocation mutation, then checks the mutation
    payload and the database row it produced.
    """
    login(user)

    posting = company_project_posting_object
    posting.form_step = 3
    posting.company = company
    posting.employee = None
    posting.student = student
    posting.state = ProjectPostingState.DRAFT
    posting.save()

    data, errors = project_posting_allocation(user, posting.id,
                                              ProjectPostingState.PUBLIC, employee)
    assert errors is None
    assert data is not None

    allocation = data.get('projectPostingAllocation')
    assert allocation is not None
    assert allocation.get('success')

    # Slug and relay global id in the payload must resolve to the same row.
    slug = allocation.get('slug')
    element_id = from_global_id(allocation.get('projectPostingId'))[1]
    by_slug = ProjectPosting.objects.get(slug=slug)
    by_pk = ProjectPosting.objects.get(pk=element_id)
    assert by_slug == by_pk

    if employee is not None:
        assert by_pk.employee.id == employee.id
        assert by_pk.company.id == employee.user.company.id
        assert by_pk.student is None
    if student is not None:
        assert by_pk.student.id == student.id
        assert by_pk.employee is None
        assert by_pk.company is None

    assert by_pk.state == ProjectPostingState.PUBLIC
    assert by_pk.date_published is not None
    assert by_pk.form_step == 4
@pytest.mark.django_db
def test_allocation_with_invalid_job_posting_id(user_employee, login, project_posting_allocation):
    """Allocating a non-existent posting id yields errors and a null payload."""
    login(user_employee)

    data, errors = project_posting_allocation(user_employee, 1337,
                                              ProjectPostingState.PUBLIC,
                                              user_employee.employee)
    assert errors is not None
    assert data is not None
    assert data.get('projectPostingAllocation') is None
@pytest.mark.django_db
def test_allocation_without_login(user_employee, company_project_posting_object,
                                  project_posting_allocation):
    """An anonymous caller gets errors and a null allocation payload."""
    data, errors = project_posting_allocation(AnonymousUser(),
                                              company_project_posting_object.id,
                                              ProjectPostingState.PUBLIC,
                                              user_employee.employee)
    assert errors is not None
    assert data is not None
    assert data.get('projectPostingAllocation') is None
@pytest.mark.django_db
def test_allocation_with_invalid_step(user_employee, company_project_posting_object, login,
                                      project_posting_allocation):
    """A posting still on form step 1 fails with a projectPostingStep error."""
    login(user_employee)

    company_project_posting_object.form_step = 1
    company_project_posting_object.save()

    data, errors = project_posting_allocation(user_employee,
                                              company_project_posting_object.id,
                                              ProjectPostingState.PUBLIC,
                                              user_employee.employee)
    assert errors is None
    assert data is not None

    allocation = data.get('projectPostingAllocation')
    assert allocation is not None
    assert allocation.get('success') is False
    errors = allocation.get('errors')
    assert errors is not None
    assert 'projectPostingStep' in errors
@pytest.mark.django_db
def test_allocation_as_employee_from_another_company(user_employee, user_employee_2,
                                                     company_project_posting_object, login,
                                                     project_posting_allocation):
    """Passing an employee from a different company fails with an 'employee' error."""
    login(user_employee)

    posting = company_project_posting_object
    posting.form_step = 3
    posting.company = user_employee.company
    posting.employee = None
    posting.student = None
    posting.save()

    data, errors = project_posting_allocation(user_employee_2, posting.id,
                                              ProjectPostingState.PUBLIC,
                                              user_employee.employee)
    assert errors is None
    assert data is not None

    allocation = data.get('projectPostingAllocation')
    assert allocation is not None
    assert allocation.get('success') is False
    assert 'employee' in allocation.get('errors')
@pytest.mark.django_db
def test_allocation_as_student_with_project_of_company(user_employee, user_student,
                                                       company_project_posting_object, login,
                                                       project_posting_allocation):
    """A student allocating a company-owned posting fails with an 'employee' error."""
    login(user_employee)

    posting = company_project_posting_object
    posting.form_step = 3
    posting.company = user_employee.company
    posting.employee = None
    posting.student = None
    posting.save()

    data, errors = project_posting_allocation(user_student, posting.id,
                                              ProjectPostingState.PUBLIC,
                                              user_employee.employee)
    assert errors is None
    assert data is not None

    allocation = data.get('projectPostingAllocation')
    assert allocation is not None
    assert allocation.get('success') is False
    assert 'employee' in allocation.get('errors')
@pytest.mark.django_db
def test_allocation_as_company_with_project_of_student(user_employee, user_student,
                                                       company_project_posting_object, login,
                                                       project_posting_allocation):
    """A company allocating a student-owned posting fails with an 'employee' error."""
    login(user_employee)

    posting = company_project_posting_object
    posting.form_step = 3
    posting.company = None
    posting.employee = None
    posting.student = user_student.student
    posting.save()

    data, errors = project_posting_allocation(user_employee, posting.id,
                                              ProjectPostingState.PUBLIC,
                                              user_employee.employee)
    assert errors is None
    assert data is not None

    allocation = data.get('projectPostingAllocation')
    assert allocation is not None
    assert allocation.get('success') is False
    assert 'employee' in allocation.get('errors')
@pytest.mark.django_db
def test_allocation_as_company_with_project_of_student_without_employee(
        user_employee, user_student, company_project_posting_object, login,
        project_posting_allocation):
    """Same as above but with no employee supplied; still an 'employee' error."""
    login(user_employee)

    posting = company_project_posting_object
    posting.form_step = 3
    posting.company = None
    posting.employee = None
    posting.student = user_student.student
    posting.save()

    data, errors = project_posting_allocation(user_employee, posting.id,
                                              ProjectPostingState.PUBLIC, None)
    assert errors is None
    assert data is not None

    allocation = data.get('projectPostingAllocation')
    assert allocation is not None
    assert allocation.get('success') is False
    assert 'employee' in allocation.get('errors')
| 46.370166
| 100
| 0.719647
| 935
| 8,393
| 6.125134
| 0.072727
| 0.19312
| 0.168675
| 0.216867
| 0.833944
| 0.818928
| 0.79099
| 0.753449
| 0.74367
| 0.706129
| 0
| 0.002746
| 0.219111
| 8,393
| 180
| 101
| 46.627778
| 0.871071
| 0.002502
| 0
| 0.662162
| 0
| 0
| 0.07718
| 0.060215
| 0
| 0
| 0
| 0
| 0.310811
| 1
| 0.067568
| false
| 0
| 0.027027
| 0
| 0.094595
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c99965083653baf3c5516cbeefbe972fb1297b51
| 17,666
|
py
|
Python
|
flotilla/test/data_model/test_data_model_gene_ontology.py
|
YeoLab/flotilla
|
31da64567e59003c2b9c03fc8f4eb27ee62e299c
|
[
"MIT",
"BSD-3-Clause"
] | 98
|
2015-01-08T19:38:47.000Z
|
2021-05-04T02:11:55.000Z
|
flotilla/test/data_model/test_data_model_gene_ontology.py
|
YeoLab/flotilla
|
31da64567e59003c2b9c03fc8f4eb27ee62e299c
|
[
"MIT",
"BSD-3-Clause"
] | 123
|
2015-01-08T22:28:43.000Z
|
2019-12-20T05:22:29.000Z
|
flotilla/test/data_model/test_data_model_gene_ontology.py
|
YeoLab/flotilla
|
31da64567e59003c2b9c03fc8f4eb27ee62e299c
|
[
"MIT",
"BSD-3-Clause"
] | 27
|
2015-01-21T15:41:40.000Z
|
2020-12-22T05:40:47.000Z
|
"""
"""
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
from collections import defaultdict
import numpy as np
import pandas as pd
import pandas.util.testing as pdt
import pytest
from scipy.stats import hypergeom
class TestGeneOntologyData(object):
@pytest.fixture
def gene_ontology(self, gene_ontology_data):
from flotilla import GeneOntologyData
return GeneOntologyData(gene_ontology_data)
def test_init(self, gene_ontology_data, gene_ontology):
true_data = gene_ontology_data.dropna()
true_all_genes = true_data['Ensembl Gene ID'].unique()
true_ontology = defaultdict(dict)
for go, df in true_data.groupby('GO Term Accession'):
true_ontology[go]['genes'] = set(df['Ensembl Gene ID'])
true_ontology[go]['name'] = df['GO Term Name'].values[0]
true_ontology[go]['domain'] = df['GO domain'].values[0]
true_ontology[go]['n_genes'] = len(true_ontology[go]['genes'])
pdt.assert_frame_equal(true_data, gene_ontology.data)
pdt.assert_numpy_array_equal(sorted(true_all_genes),
sorted(gene_ontology.all_genes))
pdt.assert_contains_all(true_ontology.keys(), gene_ontology.ontology)
pdt.assert_contains_all(gene_ontology.ontology.keys(), true_ontology)
for go, true_attributes in true_ontology.items():
test_attributes = gene_ontology.ontology[go]
true_genes = sorted(true_attributes['genes'])
test_genes = sorted(test_attributes['genes'])
pdt.assert_numpy_array_equal(true_genes, test_genes)
pdt.assert_equal(true_attributes['name'], test_attributes['name'])
pdt.assert_equal(true_attributes['domain'],
test_attributes['domain'])
pdt.assert_equal(true_attributes['n_genes'],
test_attributes['n_genes'])
def test_enrichment(self, gene_ontology):
features_of_interest = gene_ontology.all_genes[:10]
test_enrichment_df = gene_ontology.enrichment(features_of_interest)
p_value_cutoff = 1000000
min_feature_size = 3
min_background_size = 5
cross_reference = {}
domains = gene_ontology.domains
background = gene_ontology.all_genes
n_all_genes = len(background)
n_features_of_interest = len(features_of_interest)
enrichment = defaultdict(dict)
for go_term, go_genes in gene_ontology.ontology.items():
if go_genes['domain'] not in domains:
continue
features_in_go = go_genes['genes'].intersection(
features_of_interest)
background_in_go = go_genes['genes'].intersection(background)
too_few_features = len(features_in_go) < min_feature_size
too_few_background = len(background_in_go) < min_background_size
if too_few_features or too_few_background:
continue
# TODO D.R.Y. this
# Survival function is more accurate on small p-values
log_p_value = hypergeom.logsf(len(features_in_go), n_all_genes,
len(background_in_go),
n_features_of_interest)
# p_value = 0 if p_value < 0 else p_value
symbols = [cross_reference[f] if f in cross_reference else f for f
in features_in_go]
enrichment['negative_log_p_value'][go_term] = -log_p_value
enrichment['n_features_of_interest_in_go_term'][go_term] = len(
features_in_go)
enrichment['n_background_in_go_term'][go_term] = len(
background_in_go)
enrichment['n_features_total_in_go_term'][go_term] = len(
go_genes['genes'])
enrichment['features_of_interest_in_go_term'][
go_term] = ','.join(features_in_go)
enrichment['features_of_interest_in_go_term_gene_symbols'][
go_term] = ','.join(symbols)
enrichment['go_domain'][go_term] = go_genes['domain']
enrichment['go_name'][go_term] = go_genes['name']
enrichment_df = pd.DataFrame(enrichment)
# TODO D.R.Y. this
# Bonferonni correction
enrichment_df['bonferonni_corrected_negative_log_p_value'] = \
enrichment_df['negative_log_p_value'] \
- np.log(enrichment_df.shape[0])
ind = enrichment_df['bonferonni_corrected_negative_log_p_value'
] < np.log(p_value_cutoff)
enrichment_df = enrichment_df.ix[ind]
true_enrichment_df = enrichment_df.sort(
columns=['negative_log_p_value'], ascending=False)
pdt.assert_frame_equal(test_enrichment_df, true_enrichment_df)
# TODO : FOUR TESTS EXPECTED TO FAIL
####################################
@pytest.mark.xfail
def test_invalid_background(self, gene_ontology):
features_of_interest = gene_ontology.all_genes[:10]
background = [f + '_asdf' for f in features_of_interest]
gene_ontology.enrichment(features_of_interest,
background=background)
@pytest.mark.xfail
def test_invalid_features(self, gene_ontology):
features_of_interest = gene_ontology.all_genes[:10]
features_of_interest = [f + '_asdf' for f in features_of_interest]
background = [f + '_asdf' for f in gene_ontology.all_genes[:20]]
gene_ontology.enrichment(features_of_interest,
background=background)
@pytest.mark.xfail
def test_invalid_domain_str(self, gene_ontology):
features_of_interest = gene_ontology.all_genes[:10]
gene_ontology.enrichment(features_of_interest,
domain='fake_domain')
@pytest.mark.xfail
def test_invalid_domain_iterable(self, gene_ontology):
features_of_interest = gene_ontology.all_genes[:10]
gene_ontology.enrichment(features_of_interest,
domain=['fake_domain1', 'fake_domain2'])
##########################################################################
def test_custom_domain_str(self, gene_ontology):
features_of_interest = gene_ontology.all_genes[:10]
domain = 'cellular_component'
test_enrichment_df = gene_ontology.enrichment(features_of_interest,
domain=domain)
domains = frozenset([domain])
p_value_cutoff = 1000000
min_feature_size = 3
min_background_size = 5
cross_reference = {}
background = gene_ontology.all_genes
n_all_genes = len(background)
n_features_of_interest = len(features_of_interest)
enrichment = defaultdict(dict)
for go_term, go_genes in gene_ontology.ontology.items():
if go_genes['domain'] not in domains:
continue
features_in_go = go_genes['genes'].intersection(
features_of_interest)
background_in_go = go_genes['genes'].intersection(background)
too_few_features = len(features_in_go) < min_feature_size
too_few_background = len(background_in_go) < min_background_size
if too_few_features or too_few_background:
continue
# TODO D.R.Y. this
# Survival function is more accurate on small p-values
log_p_value = hypergeom.logsf(len(features_in_go), n_all_genes,
len(background_in_go),
n_features_of_interest)
# p_value = 0 if p_value < 0 else p_value
symbols = [cross_reference[f] if f in cross_reference else f for f
in features_in_go]
enrichment['negative_log_p_value'][go_term] = -log_p_value
enrichment['n_features_of_interest_in_go_term'][go_term] = len(
features_in_go)
enrichment['n_background_in_go_term'][go_term] = len(
background_in_go)
enrichment['n_features_total_in_go_term'][go_term] = len(
go_genes['genes'])
enrichment['features_of_interest_in_go_term'][
go_term] = ','.join(features_in_go)
enrichment['features_of_interest_in_go_term_gene_symbols'][
go_term] = ','.join(symbols)
enrichment['go_domain'][go_term] = go_genes['domain']
enrichment['go_name'][go_term] = go_genes['name']
enrichment_df = pd.DataFrame(enrichment)
# TODO D.R.Y. this
# Bonferonni correction
enrichment_df['bonferonni_corrected_negative_log_p_value'] = \
enrichment_df['negative_log_p_value'] \
- np.log(enrichment_df.shape[0])
ind = enrichment_df['bonferonni_corrected_negative_log_p_value'
] < np.log(p_value_cutoff)
enrichment_df = enrichment_df.ix[ind]
true_enrichment_df = enrichment_df.sort(
columns=['negative_log_p_value'], ascending=False)
pdt.assert_frame_equal(test_enrichment_df, true_enrichment_df)
def test_custom_domain_iterable(self, gene_ontology):
features_of_interest = gene_ontology.all_genes[:10]
domain = ['cellular_component', 'molecular_function']
test_enrichment_df = gene_ontology.enrichment(features_of_interest,
domain=domain)
domains = frozenset(domain)
p_value_cutoff = 1000000
min_feature_size = 3
min_background_size = 5
cross_reference = {}
background = gene_ontology.all_genes
n_all_genes = len(background)
n_features_of_interest = len(features_of_interest)
enrichment = defaultdict(dict)
for go_term, go_genes in gene_ontology.ontology.items():
if go_genes['domain'] not in domains:
continue
features_in_go = go_genes['genes'].intersection(
features_of_interest)
background_in_go = go_genes['genes'].intersection(background)
too_few_features = len(features_in_go) < min_feature_size
too_few_background = len(background_in_go) < min_background_size
if too_few_features or too_few_background:
continue
# TODO D.R.Y this
# Survival function is more accurate on small p-values
log_p_value = hypergeom.logsf(len(features_in_go), n_all_genes,
len(background_in_go),
n_features_of_interest)
# p_value = 0 if p_value < 0 else p_value
symbols = [cross_reference[f] if f in cross_reference else f for f
in features_in_go]
enrichment['negative_log_p_value'][go_term] = - log_p_value
enrichment['n_features_of_interest_in_go_term'][go_term] = len(
features_in_go)
enrichment['n_background_in_go_term'][go_term] = len(
background_in_go)
enrichment['n_features_total_in_go_term'][go_term] = len(
go_genes['genes'])
enrichment['features_of_interest_in_go_term'][
go_term] = ','.join(features_in_go)
enrichment['features_of_interest_in_go_term_gene_symbols'][
go_term] = ','.join(symbols)
enrichment['go_domain'][go_term] = go_genes['domain']
enrichment['go_name'][go_term] = go_genes['name']
enrichment_df = pd.DataFrame(enrichment)
# TODO D.R.Y. this
# Bonferonni correction
enrichment_df['bonferonni_corrected_negative_log_p_value'] = \
enrichment_df['negative_log_p_value'] \
- np.log(enrichment_df.shape[0])
ind = enrichment_df['bonferonni_corrected_negative_log_p_value'
] < np.log(p_value_cutoff)
enrichment_df = enrichment_df.ix[ind]
true_enrichment_df = enrichment_df.sort(
columns=['negative_log_p_value'], ascending=False)
pdt.assert_frame_equal(test_enrichment_df, true_enrichment_df)
def test_too_few_features(self, gene_ontology):
features_of_interest = gene_ontology.all_genes[:3]
test_enrichment_df = gene_ontology.enrichment(features_of_interest)
domains = gene_ontology.domains
p_value_cutoff = 1000000
min_feature_size = 3
min_background_size = 5
cross_reference = {}
background = gene_ontology.all_genes
n_all_genes = len(background)
n_features_of_interest = len(features_of_interest)
enrichment = defaultdict(dict)
for go_term, go_genes in gene_ontology.ontology.items():
if go_genes['domain'] not in domains:
continue
features_in_go = go_genes['genes'].intersection(
features_of_interest)
background_in_go = go_genes['genes'].intersection(background)
too_few_features = len(features_in_go) < min_feature_size
too_few_background = len(background_in_go) < min_background_size
if too_few_features or too_few_background:
continue
# TODO D.R.Y. this
# Survival function is more accurate on small p-values
log_p_value = hypergeom.logsf(len(features_in_go), n_all_genes,
len(background_in_go),
n_features_of_interest)
# p_value = 0 if p_value < 0 else p_value
symbols = [cross_reference[f] if f in cross_reference else f for f
in features_in_go]
enrichment['negative_log_p_value'][go_term] = -log_p_value
enrichment['n_features_of_interest_in_go_term'][go_term] = len(
features_in_go)
enrichment['n_background_in_go_term'][go_term] = len(
background_in_go)
enrichment['n_features_total_in_go_term'][go_term] = len(
go_genes['genes'])
enrichment['features_of_interest_in_go_term'][
go_term] = ','.join(features_in_go)
enrichment['features_of_interest_in_go_term_gene_symbols'][
go_term] = ','.join(symbols)
enrichment['go_domain'][go_term] = go_genes['domain']
enrichment['go_name'][go_term] = go_genes['name']
enrichment_df = pd.DataFrame(enrichment)
# TODO D.R.Y. this
# Bonferonni correction
enrichment_df['bonferonni_corrected_negative_log_p_value'] = \
enrichment_df['negative_log_p_value'] \
- np.log(enrichment_df.shape[0])
ind = enrichment_df['bonferonni_corrected_negative_log_p_value'
] < np.log(p_value_cutoff)
enrichment_df = enrichment_df.ix[ind]
true_enrichment_df = enrichment_df.sort(
columns=['negative_log_p_value'], ascending=False)
pdt.assert_frame_equal(test_enrichment_df, true_enrichment_df)
def test_no_enrichment(self, gene_ontology):
    """Enrichment on a feature set too small to pass the size filters.

    With only two features of interest, every GO term fails the
    ``min_feature_size`` cutoff, so the independently recomputed
    enrichment table is empty and ``enrichment()`` is expected to
    return ``None``.
    """
    features_of_interest = gene_ontology.all_genes[:2]
    test_enrichment_df = gene_ontology.enrichment(features_of_interest)
    # Recompute the expected result with the same algorithm the
    # implementation uses (same defaults: domains, size cutoffs,
    # empty cross-reference, all genes as background).
    domains = gene_ontology.domains
    min_feature_size = 3
    min_background_size = 5
    cross_reference = {}
    background = gene_ontology.all_genes
    n_all_genes = len(background)
    n_features_of_interest = len(features_of_interest)
    enrichment = defaultdict(dict)
    for go_term, go_genes in gene_ontology.ontology.items():
        if go_genes['domain'] not in domains:
            continue
        features_in_go = go_genes['genes'].intersection(
            features_of_interest)
        background_in_go = go_genes['genes'].intersection(background)
        too_few_features = len(features_in_go) < min_feature_size
        too_few_background = len(background_in_go) < min_background_size
        # With only two features of interest, no term can reach
        # min_feature_size, so every term is skipped here.
        if too_few_features or too_few_background:
            continue
        # Survival function is more accurate on small p-values
        p_value = hypergeom.sf(len(features_in_go), n_all_genes,
                               len(background_in_go),
                               n_features_of_interest)
        p_value = 0 if p_value < 0 else p_value
        symbols = [cross_reference[f] if f in cross_reference else f
                   for f in features_in_go]
        enrichment['p_value'][go_term] = p_value
        enrichment['n_features_of_interest_in_go_term'][go_term] = len(
            features_in_go)
        enrichment['n_background_in_go_term'][go_term] = len(
            background_in_go)
        enrichment['n_features_total_in_go_term'][go_term] = len(
            go_genes['genes'])
        enrichment['features_of_interest_in_go_term'][
            go_term] = ','.join(features_in_go)
        enrichment['features_of_interest_in_go_term_gene_symbols'][
            go_term] = ','.join(symbols)
        enrichment['go_domain'][go_term] = go_genes['domain']
        enrichment['go_name'][go_term] = go_genes['name']
    true_enrichment_df = pd.DataFrame(enrichment)
    assert true_enrichment_df.empty
    assert test_enrichment_df is None
| 45.413882
| 78
| 0.622778
| 2,105
| 17,666
| 4.7981
| 0.070309
| 0.029703
| 0.099802
| 0.033663
| 0.870594
| 0.845347
| 0.839406
| 0.836535
| 0.830792
| 0.820891
| 0
| 0.005898
| 0.289766
| 17,666
| 388
| 79
| 45.530928
| 0.799075
| 0.038605
| 0
| 0.788274
| 0
| 0
| 0.110016
| 0.066378
| 0
| 0
| 0
| 0.002577
| 0.045603
| 1
| 0.035831
| false
| 0
| 0.029316
| 0
| 0.071661
| 0.003257
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9fa4b0291e9c571c5e5e6d4c6e26ba709c3e2ec
| 55,755
|
py
|
Python
|
dohq_teamcity/api/agent_pool_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 23
|
2018-10-19T07:28:45.000Z
|
2021-11-12T12:46:09.000Z
|
dohq_teamcity/api/agent_pool_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 31
|
2018-10-16T05:53:11.000Z
|
2021-09-09T14:44:14.000Z
|
dohq_teamcity/api/agent_pool_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 12
|
2018-10-28T23:00:17.000Z
|
2021-09-07T12:07:13.000Z
|
# coding: utf-8
"""
TeamCity REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2018.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
from dohq_teamcity.custom.base_model import TeamCityObject
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dohq_teamcity.models.agent import Agent # noqa: F401,E501
from dohq_teamcity.models.agent_pool import AgentPool # noqa: F401,E501
from dohq_teamcity.models.agent_pools import AgentPools # noqa: F401,E501
from dohq_teamcity.models.agents import Agents # noqa: F401,E501
from dohq_teamcity.models.project import Project # noqa: F401,E501
from dohq_teamcity.models.projects import Projects # noqa: F401,E501
class AgentPoolApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
# NOTE(review): presumably the model-name prefix this API group handles
# (used by the client's custom base machinery) — confirm against
# TeamCityObject/custom code.
base_name = 'AgentPool'

def __init__(self, api_client=None):
    # Client used for every HTTP call (see call_api usage below).
    self.api_client = api_client
def add_agent(self, agent_pool_locator, **kwargs):  # noqa: E501
    """Add an agent to an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :param Agent body:
    :param str fields:
    :return: Agent, or the request thread when asynchronous
    """
    # Callers of this wrapper always want the deserialized data, never
    # the full HTTP response triple.
    kwargs['_return_http_data_only'] = True
    # Sync and async calls are identical here; the async_req flag is
    # simply forwarded to the underlying implementation.
    return self.__add_agent_with_http_info(agent_pool_locator, **kwargs)  # noqa: E501
def add_project(self, agent_pool_locator, **kwargs):  # noqa: E501
    """Assign a project to an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :param Project body:
    :return: Project, or the request thread when asynchronous
    """
    # Always unwrap to the deserialized data only.
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths just delegate; async_req travels
    # along in kwargs.
    return self.__add_project_with_http_info(agent_pool_locator, **kwargs)  # noqa: E501
def create_pool(self, **kwargs):  # noqa: E501
    """Create a new agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param AgentPool body:
    :return: AgentPool, or the request thread when asynchronous
    """
    # Return only the deserialized body, not the raw HTTP response.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical for sync and async callers.
    return self.__create_pool_with_http_info(**kwargs)  # noqa: E501
def delete_pool(self, agent_pool_locator, **kwargs):  # noqa: E501
    """Delete an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :return: None, or the request thread when asynchronous
    """
    # Only the deserialized data (here: None) is surfaced to callers.
    kwargs['_return_http_data_only'] = True
    # One delegation covers both the sync and async cases.
    return self.__delete_pool_with_http_info(agent_pool_locator, **kwargs)  # noqa: E501
def delete_pool_project(self, agent_pool_locator, project_locator, **kwargs):  # noqa: E501
    """Remove a single project from an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :param str project_locator: (required)
    :return: None, or the request thread when asynchronous
    """
    # Surface only the deserialized data to callers.
    kwargs['_return_http_data_only'] = True
    # Sync and async calls both reduce to the same delegation.
    return self.__delete_pool_project_with_http_info(
        agent_pool_locator, project_locator, **kwargs)  # noqa: E501
def delete_projects(self, agent_pool_locator, **kwargs):  # noqa: E501
    """Remove all projects from an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :return: None, or the request thread when asynchronous
    """
    # Callers get the deserialized data only.
    kwargs['_return_http_data_only'] = True
    # The async flag rides along in kwargs; delegation is uniform.
    return self.__delete_projects_with_http_info(agent_pool_locator, **kwargs)  # noqa: E501
def get_field(self, agent_pool_locator, field, **kwargs):  # noqa: E501
    """Read a single field of an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :param str field: (required)
    :return: str, or the request thread when asynchronous
    """
    # Strip the response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Identical delegation for sync and async invocations.
    return self.__get_field_with_http_info(agent_pool_locator, field, **kwargs)  # noqa: E501
def get_pool(self, agent_pool_locator, **kwargs):  # noqa: E501
    """Fetch a single agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :param str fields:
    :return: AgentPool, or the request thread when asynchronous
    """
    # Only the deserialized AgentPool is returned to the caller.
    kwargs['_return_http_data_only'] = True
    # One delegation serves both sync and async modes.
    return self.__get_pool_with_http_info(agent_pool_locator, **kwargs)  # noqa: E501
def get_pool_agents(self, agent_pool_locator, **kwargs):  # noqa: E501
    """List the agents in an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :param str locator:
    :param str fields:
    :return: Agents, or the request thread when asynchronous
    """
    # Callers always receive the deserialized data only.
    kwargs['_return_http_data_only'] = True
    # The async flag is forwarded; the delegation is the same either way.
    return self.__get_pool_agents_with_http_info(agent_pool_locator, **kwargs)  # noqa: E501
def get_pool_project(self, agent_pool_locator, project_locator, **kwargs):  # noqa: E501
    """Fetch one project assigned to an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :param str project_locator: (required)
    :param str fields:
    :return: Project, or the request thread when asynchronous
    """
    # Only the deserialized Project is surfaced.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths collapse to the same delegation.
    return self.__get_pool_project_with_http_info(
        agent_pool_locator, project_locator, **kwargs)  # noqa: E501
def get_pool_projects(self, agent_pool_locator, **kwargs):  # noqa: E501
    """List the projects assigned to an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :param str fields:
    :return: Projects, or the request thread when asynchronous
    """
    # Return the deserialized data only.
    kwargs['_return_http_data_only'] = True
    # The same delegation handles both sync and async callers.
    return self.__get_pool_projects_with_http_info(agent_pool_locator, **kwargs)  # noqa: E501
def get_pools(self, **kwargs):  # noqa: E501
    """List agent pools.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str locator:
    :param str fields:
    :return: AgentPools, or the request thread when asynchronous
    """
    # Only the deserialized AgentPools object is returned.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical regardless of async_req.
    return self.__get_pools_with_http_info(**kwargs)  # noqa: E501
def replace_projects(self, agent_pool_locator, **kwargs):  # noqa: E501
    """Replace the set of projects assigned to an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :param Projects body:
    :return: Projects, or the request thread when asynchronous
    """
    # Unwrap the response to its deserialized data.
    kwargs['_return_http_data_only'] = True
    # One delegation serves both the sync and async cases.
    return self.__replace_projects_with_http_info(agent_pool_locator, **kwargs)  # noqa: E501
def set_field(self, agent_pool_locator, field, **kwargs):  # noqa: E501
    """Set a single field of an agent pool.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    :param async_req: bool
    :param str agent_pool_locator: (required)
    :param str field: (required)
    :param str body:
    :return: str, or the request thread when asynchronous
    """
    # Only the deserialized data is of interest to callers here.
    kwargs['_return_http_data_only'] = True
    # async_req is forwarded via kwargs; delegation is uniform.
    return self.__set_field_with_http_info(agent_pool_locator, field, **kwargs)  # noqa: E501
def __add_agent_with_http_info(self, agent_pool_locator, **kwargs):  # noqa: E501
    """add_agent  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__add_agent_with_http_info(agent_pool_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str agent_pool_locator: (required)
    :param Agent body:
    :param str fields:
    :return: Agent
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keywords this endpoint accepts: its own parameters plus the
    # api_client plumbing options shared by all generated endpoints.
    all_params = ['agent_pool_locator', 'body', 'fields']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() is captured here, so `params` holds self, the positional
    # args and `kwargs`; recognized kwargs are then hoisted into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_agent" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'agent_pool_locator' is set
    if ('agent_pool_locator' not in params or
            params['agent_pool_locator'] is None):
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `add_agent`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'agent_pool_locator' in params:
        # A TeamCityObject may be passed in place of a raw locator
        # string; its locator_id is used in the URL path.
        if isinstance(params['agent_pool_locator'], TeamCityObject):
            path_params['agentPoolLocator'] = params['agent_pool_locator'].locator_id
        else:
            path_params['agentPoolLocator'] = params['agent_pool_locator']  # noqa: E501
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}/agents', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Agent',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __add_project_with_http_info(self, agent_pool_locator, **kwargs):  # noqa: E501
    """add_project  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__add_project_with_http_info(agent_pool_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str agent_pool_locator: (required)
    :param Project body:
    :return: Project
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: endpoint parameters plus shared client options.
    all_params = ['agent_pool_locator', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Capture locals(), then fold recognized kwargs into the dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_project" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'agent_pool_locator' is set
    if ('agent_pool_locator' not in params or
            params['agent_pool_locator'] is None):
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `add_project`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'agent_pool_locator' in params:
        # TeamCityObject instances contribute their locator_id to the path.
        if isinstance(params['agent_pool_locator'], TeamCityObject):
            path_params['agentPoolLocator'] = params['agent_pool_locator'].locator_id
        else:
            path_params['agentPoolLocator'] = params['agent_pool_locator']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}/projects', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Project',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __create_pool_with_http_info(self, **kwargs):  # noqa: E501
    """create_pool  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__create_pool_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param AgentPool body:
    :return: AgentPool
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: the request body plus shared client options.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Capture locals(), then fold recognized kwargs into the dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_pool" % key
            )
        params[key] = val
    del params['kwargs']
    # No path parameters for this endpoint; the pool is created at the
    # collection URL.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/agentPools', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AgentPool',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __delete_pool_with_http_info(self, agent_pool_locator, **kwargs):  # noqa: E501
    """delete_pool  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__delete_pool_with_http_info(agent_pool_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str agent_pool_locator: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: the locator plus shared client options.
    all_params = ['agent_pool_locator']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Capture locals(), then fold recognized kwargs into the dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_pool" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'agent_pool_locator' is set
    if ('agent_pool_locator' not in params or
            params['agent_pool_locator'] is None):
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `delete_pool`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'agent_pool_locator' in params:
        # TeamCityObject instances contribute their locator_id to the path.
        if isinstance(params['agent_pool_locator'], TeamCityObject):
            path_params['agentPoolLocator'] = params['agent_pool_locator'].locator_id
        else:
            path_params['agentPoolLocator'] = params['agent_pool_locator']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __delete_pool_project_with_http_info(self, agent_pool_locator, project_locator, **kwargs):  # noqa: E501
    """delete_pool_project  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__delete_pool_project_with_http_info(agent_pool_locator, project_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str agent_pool_locator: (required)
    :param str project_locator: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: both locators plus shared client options.
    all_params = ['agent_pool_locator', 'project_locator']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Capture locals(), then fold recognized kwargs into the dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_pool_project" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'agent_pool_locator' is set
    if ('agent_pool_locator' not in params or
            params['agent_pool_locator'] is None):
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `delete_pool_project`")  # noqa: E501
    # verify the required parameter 'project_locator' is set
    if ('project_locator' not in params or
            params['project_locator'] is None):
        raise ValueError("Missing the required parameter `project_locator` when calling `delete_pool_project`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'agent_pool_locator' in params:
        # TeamCityObject instances contribute their locator_id to the path.
        if isinstance(params['agent_pool_locator'], TeamCityObject):
            path_params['agentPoolLocator'] = params['agent_pool_locator'].locator_id
        else:
            path_params['agentPoolLocator'] = params['agent_pool_locator']  # noqa: E501
    if 'project_locator' in params:
        if isinstance(params['project_locator'], TeamCityObject):
            path_params['projectLocator'] = params['project_locator'].locator_id
        else:
            path_params['projectLocator'] = params['project_locator']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}/projects/{projectLocator}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __delete_projects_with_http_info(self, agent_pool_locator, **kwargs):  # noqa: E501
    """delete_projects  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__delete_projects_with_http_info(agent_pool_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str agent_pool_locator: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: the locator plus shared client options.
    all_params = ['agent_pool_locator']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Capture locals(), then fold recognized kwargs into the dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_projects" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'agent_pool_locator' is set
    if ('agent_pool_locator' not in params or
            params['agent_pool_locator'] is None):
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `delete_projects`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'agent_pool_locator' in params:
        # TeamCityObject instances contribute their locator_id to the path.
        if isinstance(params['agent_pool_locator'], TeamCityObject):
            path_params['agentPoolLocator'] = params['agent_pool_locator'].locator_id
        else:
            path_params['agentPoolLocator'] = params['agent_pool_locator']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}/projects', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_field_with_http_info(self, agent_pool_locator, field, **kwargs):  # noqa: E501
    """get_field  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_field_with_http_info(agent_pool_locator, field, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str agent_pool_locator: (required)
    :param str field: (required)
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: the two path parameters plus client options.
    all_params = ['agent_pool_locator', 'field']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Capture locals(), then fold recognized kwargs into the dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_field" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'agent_pool_locator' is set
    if ('agent_pool_locator' not in params or
            params['agent_pool_locator'] is None):
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `get_field`")  # noqa: E501
    # verify the required parameter 'field' is set
    if ('field' not in params or
            params['field'] is None):
        raise ValueError("Missing the required parameter `field` when calling `get_field`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'agent_pool_locator' in params:
        # TeamCityObject instances contribute their locator_id to the path.
        if isinstance(params['agent_pool_locator'], TeamCityObject):
            path_params['agentPoolLocator'] = params['agent_pool_locator'].locator_id
        else:
            path_params['agentPoolLocator'] = params['agent_pool_locator']  # noqa: E501
    if 'field' in params:
        if isinstance(params['field'], TeamCityObject):
            path_params['field'] = params['field'].locator_id
        else:
            path_params['field'] = params['field']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}/{field}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_pool_with_http_info(self, agent_pool_locator, **kwargs):  # noqa: E501
    """get_pool  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_pool_with_http_info(agent_pool_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str agent_pool_locator: (required)
    :param str fields:
    :return: AgentPool
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: locator, response-field selector, client options.
    all_params = ['agent_pool_locator', 'fields']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Capture locals(), then fold recognized kwargs into the dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_pool" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'agent_pool_locator' is set
    if ('agent_pool_locator' not in params or
            params['agent_pool_locator'] is None):
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `get_pool`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'agent_pool_locator' in params:
        # TeamCityObject instances contribute their locator_id to the path.
        if isinstance(params['agent_pool_locator'], TeamCityObject):
            path_params['agentPoolLocator'] = params['agent_pool_locator'].locator_id
        else:
            path_params['agentPoolLocator'] = params['agent_pool_locator']  # noqa: E501
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AgentPool',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_pool_agents_with_http_info(self, agent_pool_locator, **kwargs):  # noqa: E501
    """get_pool_agents  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response.

    :param async_req bool
    :param str agent_pool_locator: (required)
    :param str locator:
    :param str fields:
    :return: Agents, or the request thread when called asynchronously.
    """
    all_params = ['agent_pool_locator', 'locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'self': self, 'agent_pool_locator': agent_pool_locator}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_pool_agents" % key
            )
        params[key] = val

    # The pool locator is mandatory and must not be None.
    if params.get('agent_pool_locator') is None:
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `get_pool_agents`")  # noqa: E501

    collection_formats = {}

    # A TeamCityObject is converted to its locator id string.
    pool_locator = params['agent_pool_locator']
    if isinstance(pool_locator, TeamCityObject):
        pool_locator = pool_locator.locator_id
    path_params = {'agentPoolLocator': pool_locator}

    query_params = [(name, params[name])
                    for name in ('locator', 'fields') if name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # No authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}/agents', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Agents',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_pool_project_with_http_info(self, agent_pool_locator, project_locator, **kwargs):  # noqa: E501
    """get_pool_project  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response.

    :param async_req bool
    :param str agent_pool_locator: (required)
    :param str project_locator: (required)
    :param str fields:
    :return: Project, or the request thread when called asynchronously.
    """
    all_params = ['agent_pool_locator', 'project_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'self': self,
              'agent_pool_locator': agent_pool_locator,
              'project_locator': project_locator}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_pool_project" % key
            )
        params[key] = val

    # Both locators are mandatory and must not be None.
    if params.get('agent_pool_locator') is None:
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `get_pool_project`")  # noqa: E501
    if params.get('project_locator') is None:
        raise ValueError("Missing the required parameter `project_locator` when calling `get_pool_project`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are converted to their locator id strings.
    path_params = {}
    for arg, name in (('agent_pool_locator', 'agentPoolLocator'),
                      ('project_locator', 'projectLocator')):
        value = params[arg]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[name] = value

    query_params = [(name, params[name])
                    for name in ('fields',) if name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # No authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}/projects/{projectLocator}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Project',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_pool_projects_with_http_info(self, agent_pool_locator, **kwargs):  # noqa: E501
    """get_pool_projects  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response.

    :param async_req bool
    :param str agent_pool_locator: (required)
    :param str fields:
    :return: Projects, or the request thread when called asynchronously.
    """
    all_params = ['agent_pool_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'self': self, 'agent_pool_locator': agent_pool_locator}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_pool_projects" % key
            )
        params[key] = val

    # The pool locator is mandatory and must not be None.
    if params.get('agent_pool_locator') is None:
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `get_pool_projects`")  # noqa: E501

    collection_formats = {}

    # A TeamCityObject is converted to its locator id string.
    pool_locator = params['agent_pool_locator']
    if isinstance(pool_locator, TeamCityObject):
        pool_locator = pool_locator.locator_id
    path_params = {'agentPoolLocator': pool_locator}

    query_params = [(name, params[name])
                    for name in ('fields',) if name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # No authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}/projects', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Projects',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_pools_with_http_info(self, **kwargs):  # noqa: E501
    """get_pools  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response.

    :param async_req bool
    :param str locator:
    :param str fields:
    :return: AgentPools, or the request thread when called asynchronously.
    """
    all_params = ['locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'self': self}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_pools" % key
            )
        params[key] = val

    collection_formats = {}
    path_params = {}

    query_params = [(name, params[name])
                    for name in ('locator', 'fields') if name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # No authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/agentPools', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AgentPools',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_projects_with_http_info(self, agent_pool_locator, **kwargs):  # noqa: E501
    """replace_projects  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response.

    :param async_req bool
    :param str agent_pool_locator: (required)
    :param Projects body:
    :return: Projects, or the request thread when called asynchronously.
    """
    all_params = ['agent_pool_locator', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'self': self, 'agent_pool_locator': agent_pool_locator}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_projects" % key
            )
        params[key] = val

    # The pool locator is mandatory and must not be None.
    if params.get('agent_pool_locator') is None:
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `replace_projects`")  # noqa: E501

    collection_formats = {}

    # A TeamCityObject is converted to its locator id string.
    pool_locator = params['agent_pool_locator']
    if isinstance(pool_locator, TeamCityObject):
        pool_locator = pool_locator.locator_id
    path_params = {'agentPoolLocator': pool_locator}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The request body (a Projects payload) is optional.
    body_params = params.get('body')
    # No authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}/projects', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Projects',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __set_field_with_http_info(self, agent_pool_locator, field, **kwargs):  # noqa: E501
    """set_field  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response.

    :param async_req bool
    :param str agent_pool_locator: (required)
    :param str field: (required)
    :param str body:
    :return: str, or the request thread when called asynchronously.
    """
    all_params = ['agent_pool_locator', 'field', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'self': self,
              'agent_pool_locator': agent_pool_locator,
              'field': field}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_field" % key
            )
        params[key] = val

    # Both path arguments are mandatory and must not be None.
    if params.get('agent_pool_locator') is None:
        raise ValueError("Missing the required parameter `agent_pool_locator` when calling `set_field`")  # noqa: E501
    if params.get('field') is None:
        raise ValueError("Missing the required parameter `field` when calling `set_field`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are converted to their locator id strings.
    path_params = {}
    for arg, name in (('agent_pool_locator', 'agentPoolLocator'),
                      ('field', 'field')):
        value = params[arg]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[name] = value

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The request body (the new field value) is optional.
    body_params = params.get('body')
    # No authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/agentPools/{agentPoolLocator}/{field}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 39.768188
| 132
| 0.611586
| 6,291
| 55,755
| 5.111588
| 0.026387
| 0.057375
| 0.101502
| 0.041049
| 0.979382
| 0.977112
| 0.976646
| 0.968996
| 0.9643
| 0.957956
| 0
| 0.014006
| 0.298269
| 55,755
| 1,401
| 133
| 39.796574
| 0.807898
| 0.28089
| 0
| 0.825815
| 1
| 0
| 0.208602
| 0.041694
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036341
| false
| 0
| 0.012531
| 0
| 0.10401
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a00404bd3f4f82eb4ad8f0b592cd3d76f188b635
| 203
|
py
|
Python
|
blog/views.py
|
binxsahib/WholesaleManagement
|
e3a59773abe3bbe81538ba4f2debcbe9b3d3547a
|
[
"CC0-1.0"
] | null | null | null |
blog/views.py
|
binxsahib/WholesaleManagement
|
e3a59773abe3bbe81538ba4f2debcbe9b3d3547a
|
[
"CC0-1.0"
] | null | null | null |
blog/views.py
|
binxsahib/WholesaleManagement
|
e3a59773abe3bbe81538ba4f2debcbe9b3d3547a
|
[
"CC0-1.0"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
def homepage(request):
    """Render the blog home page."""
    template_name = 'Blog/home.html'
    return render(request, template_name)
def contact(request):
    """Render the blog contact page."""
    template_name = 'Blog/contact.html'
    return render(request, template_name)
| 20.3
| 47
| 0.738916
| 27
| 203
| 5.555556
| 0.62963
| 0.173333
| 0.253333
| 0.346667
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147783
| 203
| 9
| 48
| 22.555556
| 0.867052
| 0.1133
| 0
| 0
| 0
| 0
| 0.174157
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
a00b31466ab77e7f50c4562e59adb3fd769af515
| 24,020
|
py
|
Python
|
tests/st/gradient/test_custom_cell_bprop.py
|
httpsgithu/mindspore
|
c29d6bb764e233b427319cb89ba79e420f1e2c64
|
[
"Apache-2.0"
] | 1
|
2022-02-23T09:13:43.000Z
|
2022-02-23T09:13:43.000Z
|
tests/st/gradient/test_custom_cell_bprop.py
|
949144093/mindspore
|
c29d6bb764e233b427319cb89ba79e420f1e2c64
|
[
"Apache-2.0"
] | null | null | null |
tests/st/gradient/test_custom_cell_bprop.py
|
949144093/mindspore
|
c29d6bb764e233b427319cb89ba79e420f1e2c64
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test_cell_bprop """
import numpy as np
import pytest
import mindspore as ms
import mindspore.common.dtype as mstype
import mindspore.nn as nn
from mindspore import Parameter, ParameterTuple
from mindspore import context
from mindspore.common.initializer import initializer
from mindspore.common.tensor import Tensor
from mindspore.ops import composite as C
from mindspore.ops import operations as P
# GradOperation configured to return gradients w.r.t. all network inputs.
grad_all = C.GradOperation(get_all=True)


class MulAdd(nn.Cell):
    """Cell computing 2*x + y with a deliberately wrong custom bprop."""

    def construct(self, x, y):
        return 2 * x + y

    def bprop(self, x, y, out, dout):
        # In this test case, The user defined bprop is wrong defined purposely to distinguish from ad result
        return 2 * dout, 2 * y


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_mul_add():
    # With x=1, y=2 the custom bprop yields (2*dout, 2*y) = (2, 4);
    # autodiff of 2*x + y would give (2, 1), so (2, 4) proves bprop was used.
    mul_add = MulAdd()
    x = Tensor(1, dtype=ms.int32)
    y = Tensor(2, dtype=ms.int32)
    assert grad_all(mul_add)(x, y) == (2, 4)
class InlineMulADD(nn.Cell):
    """Wraps MulAdd inline: forward is mul_add(x, y) + x + 2*y."""

    def __init__(self):
        super(InlineMulADD, self).__init__()
        self.mul_add = MulAdd()
        self.param = 2  # plain Python int, not a Parameter

    def construct(self, x, y):
        return self.mul_add(x, y) + x + self.param * y


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_inline_mul_add():
    # MulAdd's custom bprop contributes (2, 4); the extra x + 2*y terms add
    # (1, 2), so the combined gradients are (3, 6).
    inline_mul_add = InlineMulADD()
    x = Tensor(1, dtype=ms.int32)
    y = Tensor(2, dtype=ms.int32)
    assert grad_all(inline_mul_add)(x, y) == (3, 6)


class WithParameter(nn.Cell):
    """Cell with custom bprop that reads its own Parameters."""

    def __init__(self):
        super(WithParameter, self).__init__()
        self.param1 = Parameter(1, 'param1')
        self.param2 = Parameter(2, 'param2')

    def construct(self, x, y):
        return self.param1 * self.param2 * x + y

    def bprop(self, x, y, out, dout):
        # In this test case, The user defined bprop is wrong defined purposely to distinguish from ad result
        return self.param1 * self.param2 * dout, 2 * y


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_with_param():
    # Differentiating a custom-bprop cell that owns Parameters is expected
    # to raise RuntimeError.
    with_param = WithParameter()
    with pytest.raises(RuntimeError):
        grad_all(with_param)(1, 2)
class WithNoBprop(nn.Cell):
    """Same forward as MulAdd but without a custom bprop (uses autodiff)."""

    def construct(self, x, y):
        return 2 * x + y


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_with_no_bprop():
    # Without a custom bprop, autodiff of 2*x + y gives (2, 1).
    with_no_bprop = WithNoBprop()
    x = Tensor(1, dtype=ms.int32)
    y = Tensor(2, dtype=ms.int32)
    assert grad_all(with_no_bprop)(x, y) == (2, 1)
@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_in_bprop_1():
    # Calling grad_all inside both construct and bprop; inner cell has NO
    # custom bprop, so its real ReLU gradients are used.
    class GradInBprop_1(nn.Cell):
        def __init__(self):
            super(GradInBprop_1, self).__init__()
            self.relu = P.ReLU()

        def construct(self, x, y):
            # y is intentionally unused in the forward pass.
            return self.relu(x)

    class GradInBprop_2(nn.Cell):
        def __init__(self):
            super(GradInBprop_2, self).__init__()
            self.f = GradInBprop_1()

        def construct(self, x, y):
            return self.f(x, y), grad_all(self.f)(x, y)

        def bprop(self, x, y, out, dout):
            # Takes gradients of the inner cell inside its own bprop.
            grads = grad_all(self.f)(x, y)
            return out[1][0], grads[1]

    class GradInBprop_3(nn.Cell):
        def __init__(self):
            super(GradInBprop_3, self).__init__()
            self.f = GradInBprop_2()

        def construct(self, x, y):
            return self.f(x, y)

    grad_in_bprop = GradInBprop_3()
    grads = grad_all(grad_in_bprop)(Tensor(np.ones([2, 2]).astype(np.float32)),
                                    Tensor(np.ones([2, 2]).astype(np.float32)))
    # dx is ReLU's gradient at positive input (ones); dy is zeros since y is unused.
    assert (grads[0].asnumpy() == np.ones([2, 2]).astype(np.float32)).all()
    assert (grads[1].asnumpy() == np.zeros([2, 2]).astype(np.float32)).all()


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_in_bprop_2():
    # As test_grad_in_bprop_1, but the inner cell also defines a custom bprop.
    class GradInBprop_1(nn.Cell):
        def __init__(self):
            super(GradInBprop_1, self).__init__()
            self.relu = P.ReLU()

        def construct(self, x, y):
            return self.relu(x)

        def bprop(self, x, y, out, dout):
            # Custom inner gradients: (x*y, y+x).
            return x * y, y + x

    class GradInBprop_2(nn.Cell):
        def __init__(self):
            super(GradInBprop_2, self).__init__()
            self.f = GradInBprop_1()

        def construct(self, x, y):
            return self.f(x, y), grad_all(self.f)(x, y)

        def bprop(self, x, y, out, dout):
            grads = grad_all(self.f)(x, y)
            return out[1][0], grads[1]

    class GradInBprop_3(nn.Cell):
        def __init__(self):
            super(GradInBprop_3, self).__init__()
            self.f = GradInBprop_2()

        def construct(self, x, y):
            return self.f(x, y)

    grad_in_bprop = GradInBprop_3()
    grads = grad_all(grad_in_bprop)(Tensor(np.ones([2, 2]).astype(np.float32)),
                                    Tensor(np.ones([2, 2]).astype(np.float32)))
    # With ones inputs the inner bprop gives dx = x*y = 1 and dy = y+x = 2.
    assert (grads[0].asnumpy() == np.ones([2, 2]).astype(np.float32)).all()
    assert (grads[1].asnumpy() == np.array([[2, 2], [2, 2]]).astype(np.float32)).all()


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_in_bprop_3():
    # As test_grad_in_bprop_1, but the OUTERMOST cell also has a custom bprop.
    class GradInBprop_1(nn.Cell):
        def __init__(self):
            super(GradInBprop_1, self).__init__()
            self.relu = P.ReLU()

        def construct(self, x, y):
            return self.relu(x)

    class GradInBprop_2(nn.Cell):
        def __init__(self):
            super(GradInBprop_2, self).__init__()
            self.f = GradInBprop_1()

        def construct(self, x, y):
            return self.f(x, y), grad_all(self.f)(x, y)

        def bprop(self, x, y, out, dout):
            grads = grad_all(self.f)(x, y)
            return out[1][0], grads[1]

    class GradInBprop_3(nn.Cell):
        def __init__(self):
            super(GradInBprop_3, self).__init__()
            self.f = GradInBprop_2()

        def construct(self, x, y):
            return self.f(x, y)

        def bprop(self, x, y, out, dout):
            # Outer bprop mixes inputs, forward output, and incoming dout.
            return x + y + y + out[0], x + x + y + y + dout[0]

    grad_in_bprop = GradInBprop_3()
    grads = grad_all(grad_in_bprop)(Tensor(np.ones([2, 2]).astype(np.float32)),
                                    Tensor(np.ones([2, 2]).astype(np.float32)))
    # With ones: dx = 1+1+1+relu(1) = 4; dy = 1+1+1+1+dout = 5.
    assert (grads[0].asnumpy() == np.array([[4, 4], [4, 4]]).astype(np.float32)).all()
    assert (grads[1].asnumpy() == np.array([[5, 5], [5, 5]]).astype(np.float32)).all()
class OneInputBprop(nn.Cell):
    """Single-input cell whose custom bprop returns a one-element tuple."""

    def __init__(self):
        super().__init__()
        self.op = P.ReLU()

    def construct(self, x):
        return self.op(x)

    def bprop(self, x, out, dout):
        # Custom gradient: 5 * x (must be a tuple even for one input).
        return (5 * x,)


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_one_input_bprop():
    net = OneInputBprop()
    input1 = Tensor(np.ones([2, 2]).astype(np.float32))
    grad = grad_all(net)(input1)
    # Custom bprop gives 5 * ones.
    assert (grad[0].asnumpy() == np.array([5, 5]).astype(np.float32)).all()


class TwoInput(nn.Cell):
    """Plain x * y cell without a custom bprop."""

    def construct(self, x, y):
        return x * y


class InlineBpropTwoInput(nn.Cell):
    """Custom bprop that doubles the inner cell's autodiff gradients."""

    def __init__(self):
        super().__init__()
        self.f = TwoInput()

    def construct(self, x, y):
        return self.f(x, y), grad_all(self.f)(x, y)

    def bprop(self, x, y, out, dout):
        grads = grad_all(self.f)(x, y)
        return grads[0] * 2, grads[1] * 2


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_inline_bprop_two_input():
    net = InlineBpropTwoInput()
    input1 = Tensor(np.ones([2, 2]).astype(np.float32))
    input2 = Tensor(np.ones([2, 2]).astype(np.float32))
    grads = grad_all(net)(input1, input2)
    # Autodiff of x*y at ones gives (1, 1); bprop doubles both to (2, 2).
    assert (grads[0].asnumpy() == np.array([2, 2]).astype(np.float32)).all()
    assert (grads[1].asnumpy() == np.array([2, 2]).astype(np.float32)).all()
    assert len(grads) == 2
class TwoInputBprop(nn.Cell):
    """x * y with custom bprop returning (5*x, 8*y)."""

    def __init__(self):
        super().__init__()
        self.op = P.Mul()

    def construct(self, x, y):
        return self.op(x, y)

    def bprop(self, x, y, out, dout):
        return 5 * x, 8 * y


class TwoInputWithParameter(nn.Cell):
    """(param + x) * y, differentiated by autodiff (no custom bprop)."""

    def __init__(self):
        super().__init__()
        self.op = P.Mul()
        self.inputdata = Parameter(initializer(1, (2, 2), mstype.float32), name="global_step")

    def construct(self, x, y):
        x = self.inputdata + x
        return self.op(x, y)


class TwoInputWithOnlyInitParameterBprop(nn.Cell):
    """Owns a Parameter that construct never uses; has a custom bprop."""

    def __init__(self):
        super().__init__()
        self.op = P.Mul()
        self.inputdata = Parameter(initializer(1, (2, 2), mstype.float32), name="global_step")

    def construct(self, x, y):
        return self.op(x, y)

    def bprop(self, x, y, out, dout):
        return 5 * x, 8 * y


class InlineMutilTwoInputParameterCell(nn.Cell):
    """Sums four sub-cells mixing custom-bprop and autodiff gradients."""

    def __init__(self):
        super().__init__()
        self.f1 = TwoInputBprop()
        self.f2 = TwoInput()
        self.f3 = TwoInputWithParameter()
        self.f4 = TwoInputWithOnlyInitParameterBprop()

    def construct(self, x, y):
        output = self.f1(x, y) + self.f2(x, y) + self.f3(x, y) + self.f4(x, y)
        return output


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_inline_bprop_multi_input():
    net = InlineMutilTwoInputParameterCell()
    input1 = Tensor(np.ones([2, 2]).astype(np.float32))
    input2 = Tensor(np.ones([2, 2]).astype(np.float32))
    net.init_parameters_data()
    grads = grad_all(net)(input1, input2)
    # dx: 5 (f1 bprop) + 1 (f2) + 1 (f3) + 5 (f4 bprop) = 12
    # dy: 8 (f1 bprop) + 1 (f2) + 2 (f3: param+x) + 8 (f4 bprop) = 19
    assert (grads[0].asnumpy() == np.array([[12, 12], [12, 12]]).astype(np.float32)).all()
    assert (grads[1].asnumpy() == np.array([[19, 19], [19, 19]]).astype(np.float32)).all()
    assert len(grads) == 2
class MulAddWithParam(nn.Cell):
    """Feeds its own Parameter as the first input of MulAdd."""

    def __init__(self):
        super(MulAddWithParam, self).__init__()
        self.mul_add = MulAdd()
        self.param = Parameter(Tensor(np.array([[3, 2]], np.float32)), 'param')

    def construct(self, x):
        return self.mul_add(self.param, x)


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_refkey_bprop():
    # Returns gradients w.r.t. both inputs and Parameters.
    grad_by_list = C.GradOperation(get_all=True, get_by_list=True)

    class GradWrap(nn.Cell):
        def __init__(self, network):
            super(GradWrap, self).__init__()
            self.network = network
            self.weights = ParameterTuple(filter(lambda x: x.requires_grad, network.get_parameters()))

        def construct(self, x):
            weights = self.weights
            grads = grad_by_list(self.network, weights)(x)
            return grads

    network = GradWrap(MulAddWithParam())
    input_data = Tensor(np.array([2, 2], np.float32))
    grads = network(input_data)
    # MulAdd's custom bprop: d(param) = 2*dout = 2, d(x) = 2*x = 4.
    assert (grads[0][0].asnumpy() == np.array([4, 4]).astype(np.float32)).all()
    assert (grads[1][0].asnumpy() == np.array([2, 2]).astype(np.float32)).all()
class MulAddWithWrongOutputNum(nn.Cell):
    """bprop returns one gradient for two inputs (wrong output count)."""

    def construct(self, x, y):
        return 2 * x + y

    def bprop(self, x, y, out, dout):
        return (2 * dout,)


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_mul_add_with_wrong_output_num():
    # With check_bprop enabled the mismatched gradient count raises TypeError.
    context.set_context(check_bprop=True)
    mul_add = MulAddWithWrongOutputNum()
    with pytest.raises(TypeError):
        grad_all(mul_add)(1, 2)


class MulAddWithWrongOutputType(nn.Cell):
    """bprop returns a plain int where a Tensor gradient is expected."""

    def construct(self, x, y):
        return 2 * x + y

    def bprop(self, x, y, out, dout):
        return 2 * dout, 2


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_mul_add_with_wrong_output_type():
    # With check_bprop enabled the wrong gradient type raises TypeError.
    context.set_context(check_bprop=True)
    mul_add = MulAddWithWrongOutputType()
    with pytest.raises(TypeError):
        grad_all(mul_add)(1, Tensor(np.ones([2, 2])))


class MulAddWithWrongOutputShape(nn.Cell):
    """bprop returns a [2]-shaped gradient for a [2, 2]-shaped input."""

    def __init__(self):
        super(MulAddWithWrongOutputShape, self).__init__()
        self.ones = Tensor(np.ones([2,]))

    def construct(self, x, y):
        return 2 * x + y

    def bprop(self, x, y, out, dout):
        return 2, self.ones


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_grad_mul_add_with_wrong_output_shape():
    # With check_bprop enabled the mismatched gradient shape raises TypeError.
    context.set_context(check_bprop=True)
    mul_add = MulAddWithWrongOutputShape()
    with pytest.raises(TypeError):
        grad_all(mul_add)(1, Tensor(np.ones([2, 2])))
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_forward_with_parameter():
    """
    Feature: Custom cell bprop
    Description: Get the gradients of inputs when the forward net using Parameter.
    Expectation: Get the correct gradients.
    """
    class Net(nn.Cell):
        def __init__(self):
            super(Net, self).__init__()
            self.matmul = P.MatMul()
            self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')

        def construct(self, x, y):
            # Forward uses Parameter z, but bprop below ignores it.
            x = x * self.z
            out = self.matmul(x, y)
            return out

        def bprop(self, x, y, out, dout):
            # Custom gradients: simply 2*x and 2*y.
            dx = x + x
            dy = y + y
            return dx, dy

    class GradNet(nn.Cell):
        def __init__(self, net):
            super(GradNet, self).__init__()
            self.net = net

        def construct(self, x, y):
            grad_f = grad_all(self.net)
            return grad_f(x, y)

    x = Tensor([[0.5, 0.6, 0.4], [1.2, 1.3, 1.1]], dtype=mstype.float32)
    y = Tensor([[0.01, 0.3, 1.1], [0.1, 0.2, 1.3], [2.1, 1.2, 3.3]], dtype=mstype.float32)
    out = GradNet(Net())(x, y)
    # Expected values are 2*x and 2*y per the custom bprop.
    expect_dx = np.array([[1.0, 1.2, 0.8],
                          [2.4, 2.6, 2.2]]).astype(np.float32)
    expect_dy = np.array([[0.02, 0.6, 2.2],
                          [0.2, 0.4, 2.6],
                          [4.2, 2.4, 6.6]]).astype(np.float32)
    assert np.allclose(out[0].asnumpy(), expect_dx)
    assert np.allclose(out[1].asnumpy(), expect_dy)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_forward_with_parameter_in_sub_cell():
    """
    Feature: Custom cell bprop
    Description: Get the gradients of inputs when the forward net using Parameter in the sub-cell.
    Expectation: Get the correct gradients.
    """
    class Net(nn.Cell):
        # Thin wrapper so the Parameter lives one level down, in Net1.
        def __init__(self):
            super(Net, self).__init__()
            self.net = Net1()

        def construct(self, x, y):
            return self.net(x, y)

    class Net1(nn.Cell):
        def __init__(self):
            super(Net1, self).__init__()
            self.matmul = P.MatMul()
            self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')

        def construct(self, x, y):
            x = x * self.z
            out = self.matmul(x, y)
            return out

        def bprop(self, x, y, out, dout):
            # Custom gradients: simply 2*x and 2*y.
            dx = x + x
            dy = y + y
            return dx, dy

    class GradNet(nn.Cell):
        def __init__(self, net):
            super(GradNet, self).__init__()
            self.net = net

        def construct(self, x, y):
            grad_f = grad_all(self.net)
            return grad_f(x, y)

    x = Tensor([[0.5, 0.6, 0.4], [1.2, 1.3, 1.1]], dtype=mstype.float32)
    y = Tensor([[0.01, 0.3, 1.1], [0.1, 0.2, 1.3], [2.1, 1.2, 3.3]], dtype=mstype.float32)
    out = GradNet(Net())(x, y)
    # Expected values are 2*x and 2*y per the custom bprop.
    expect_dx = np.array([[1.0, 1.2, 0.8],
                          [2.4, 2.6, 2.2]]).astype(np.float32)
    expect_dy = np.array([[0.02, 0.6, 2.2],
                          [0.2, 0.4, 2.6],
                          [4.2, 2.4, 6.6]]).astype(np.float32)
    assert np.allclose(out[0].asnumpy(), expect_dx)
    assert np.allclose(out[1].asnumpy(), expect_dy)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_forward_with_parameter_in_sub_cell_get_by_list():
    """
    Feature: Custom cell bprop
    Description: Get the gradients of inputs and Parameters when the forward net using Parameter in the sub-cell.
    Expectation: Get the correct gradients.
    """
    class Net(nn.Cell):
        # Thin wrapper so the Parameter lives one level down, in Net1.
        def __init__(self):
            super(Net, self).__init__()
            self.net = Net1()

        def construct(self, x, y):
            return self.net(x, y)

    class Net1(nn.Cell):
        def __init__(self):
            super(Net1, self).__init__()
            self.matmul = P.MatMul()
            self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')

        def construct(self, x, y):
            x = x * self.z
            out = self.matmul(x, y)
            return out

        def bprop(self, x, y, out, dout):
            # Custom gradients: simply 2*x and 2*y; z gets no gradient here.
            dx = x + x
            dy = y + y
            return dx, dy

    class GradNet(nn.Cell):
        def __init__(self, net):
            super(GradNet, self).__init__()
            self.net = net
            self.params = ParameterTuple(net.trainable_params())
            # Gradients w.r.t. both the inputs and the collected Parameters.
            self.grad_op = C.GradOperation(get_by_list=True, get_all=True)

        def construct(self, x, y):
            grad_f = self.grad_op(self.net, self.params)
            return grad_f(x, y)

    x = Tensor([[0.5, 0.6, 0.4], [1.2, 1.3, 1.1]], dtype=mstype.float32)
    y = Tensor([[0.01, 0.3, 1.1], [0.1, 0.2, 1.3], [2.1, 1.2, 3.3]], dtype=mstype.float32)
    out = GradNet(Net())(x, y)
    # out[0] holds input gradients (2*x, 2*y); out[1] the Parameter gradient,
    # which is zero because the custom bprop bypasses z.
    expect_dx = np.array([[1.0, 1.2, 0.8],
                          [2.4, 2.6, 2.2]]).astype(np.float32)
    expect_dy = np.array([[0.02, 0.6, 2.2],
                          [0.2, 0.4, 2.6],
                          [4.2, 2.4, 6.6]]).astype(np.float32)
    expect_dz = np.array([0.0]).astype(np.float32)
    assert np.allclose(out[0][0].asnumpy(), expect_dx)
    assert np.allclose(out[0][1].asnumpy(), expect_dy)
    assert np.allclose(out[1][0].asnumpy(), expect_dz)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_pynative_forward_with_parameter():
    """
    Feature: Custom cell bprop
    Description: Get the gradients of inputs when the forward net using Parameter.
    Expectation: Get the correct gradients.
    """
    # Same scenario as test_forward_with_parameter, but run in PyNative mode.
    context.set_context(mode=context.PYNATIVE_MODE)

    class Net(nn.Cell):
        def __init__(self):
            super(Net, self).__init__()
            self.matmul = P.MatMul()
            self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')

        def construct(self, x, y):
            x = x * self.z
            out = self.matmul(x, y)
            return out

        def bprop(self, x, y, out, dout):
            # Custom gradients: simply 2*x and 2*y.
            dx = x + x
            dy = y + y
            return dx, dy

    class GradNet(nn.Cell):
        def __init__(self, net):
            super(GradNet, self).__init__()
            self.net = net

        def construct(self, x, y):
            grad_f = grad_all(self.net)
            return grad_f(x, y)

    x = Tensor([[0.5, 0.6, 0.4], [1.2, 1.3, 1.1]], dtype=mstype.float32)
    y = Tensor([[0.01, 0.3, 1.1], [0.1, 0.2, 1.3], [2.1, 1.2, 3.3]], dtype=mstype.float32)
    out = GradNet(Net())(x, y)
    # Expected values are 2*x and 2*y per the custom bprop.
    expect_dx = np.array([[1.0, 1.2, 0.8],
                          [2.4, 2.6, 2.2]]).astype(np.float32)
    expect_dy = np.array([[0.02, 0.6, 2.2],
                          [0.2, 0.4, 2.6],
                          [4.2, 2.4, 6.6]]).astype(np.float32)
    assert np.allclose(out[0].asnumpy(), expect_dx)
    assert np.allclose(out[1].asnumpy(), expect_dy)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_pynative_forward_with_parameter_in_sub_cell():
    """
    Feature: Custom cell bprop
    Description: Get the gradients of inputs when the forward net using Parameter in the sub-cell.
    Expectation: Get the correct gradients.
    """
    context.set_context(mode=context.PYNATIVE_MODE)

    class InnerNet(nn.Cell):
        """Sub-cell holding a Parameter and defining a custom bprop."""

        def __init__(self):
            super(InnerNet, self).__init__()
            self.matmul = P.MatMul()
            self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')

        def construct(self, x, y):
            scaled = x * self.z
            return self.matmul(scaled, y)

        def bprop(self, x, y, out, dout):
            # Custom gradients ignore dout: dx = 2*x, dy = 2*y.
            return x + x, y + y

    class OuterNet(nn.Cell):
        """Wraps the sub-cell so the Parameter lives one level down."""

        def __init__(self):
            super(OuterNet, self).__init__()
            self.net = InnerNet()

        def construct(self, x, y):
            return self.net(x, y)

    class GradWrap(nn.Cell):
        """Returns gradients w.r.t. all inputs of the wrapped net."""

        def __init__(self, net):
            super(GradWrap, self).__init__()
            self.net = net

        def construct(self, x, y):
            return grad_all(self.net)(x, y)

    x = Tensor([[0.5, 0.6, 0.4], [1.2, 1.3, 1.1]], dtype=mstype.float32)
    y = Tensor([[0.01, 0.3, 1.1], [0.1, 0.2, 1.3], [2.1, 1.2, 3.3]], dtype=mstype.float32)
    out = GradWrap(OuterNet())(x, y)
    expect_dx = np.array([[1.0, 1.2, 0.8],
                          [2.4, 2.6, 2.2]]).astype(np.float32)
    expect_dy = np.array([[0.02, 0.6, 2.2],
                          [0.2, 0.4, 2.6],
                          [4.2, 2.4, 6.6]]).astype(np.float32)
    assert np.allclose(out[0].asnumpy(), expect_dx)
    assert np.allclose(out[1].asnumpy(), expect_dy)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_pynative_forward_with_parameter_in_sub_cell_get_by_list():
    """
    Feature: Custom cell bprop
    Description: Get the gradients of inputs and Parameters when the forward net using Parameter in the sub-cell.
    Expectation: Get the correct gradients.
    """
    context.set_context(mode=context.PYNATIVE_MODE)

    class InnerNet(nn.Cell):
        """Sub-cell holding a Parameter and defining a custom bprop."""

        def __init__(self):
            super(InnerNet, self).__init__()
            self.matmul = P.MatMul()
            self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')

        def construct(self, x, y):
            scaled = x * self.z
            return self.matmul(scaled, y)

        def bprop(self, x, y, out, dout):
            # Custom gradients ignore dout: dx = 2*x, dy = 2*y.
            return x + x, y + y

    class OuterNet(nn.Cell):
        """Wraps the sub-cell so the Parameter lives one level down."""

        def __init__(self):
            super(OuterNet, self).__init__()
            self.net = InnerNet()

        def construct(self, x, y):
            return self.net(x, y)

    class GradWrap(nn.Cell):
        """Returns gradients w.r.t. both the inputs and the trainable Parameters."""

        def __init__(self, net):
            super(GradWrap, self).__init__()
            self.net = net
            self.params = ParameterTuple(net.trainable_params())
            self.grad_op = C.GradOperation(get_by_list=True, get_all=True)

        def construct(self, x, y):
            return self.grad_op(self.net, self.params)(x, y)

    x = Tensor([[0.5, 0.6, 0.4], [1.2, 1.3, 1.1]], dtype=mstype.float32)
    y = Tensor([[0.01, 0.3, 1.1], [0.1, 0.2, 1.3], [2.1, 1.2, 3.3]], dtype=mstype.float32)
    out = GradWrap(OuterNet())(x, y)
    expect_dx = np.array([[1.0, 1.2, 0.8],
                          [2.4, 2.6, 2.2]]).astype(np.float32)
    expect_dy = np.array([[0.02, 0.6, 2.2],
                          [0.2, 0.4, 2.6],
                          [4.2, 2.4, 6.6]]).astype(np.float32)
    expect_dz = np.array([0.0]).astype(np.float32)
    assert np.allclose(out[0][0].asnumpy(), expect_dx)
    assert np.allclose(out[0][1].asnumpy(), expect_dy)
    assert np.allclose(out[1][0].asnumpy(), expect_dz)
| 31.480996
| 113
| 0.58751
| 3,502
| 24,020
| 3.849515
| 0.064535
| 0.017061
| 0.025369
| 0.051702
| 0.827238
| 0.813515
| 0.800979
| 0.795119
| 0.773162
| 0.761294
| 0
| 0.046615
| 0.261407
| 24,020
| 762
| 114
| 31.52231
| 0.713263
| 0.075937
| 0
| 0.781588
| 0
| 0
| 0.002042
| 0
| 0
| 0
| 0
| 0
| 0.057762
| 1
| 0.211191
| false
| 0
| 0.019856
| 0.064982
| 0.415162
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4e7f806a107cdfeb9541f4980b73ac7518b0c619
| 34,881
|
py
|
Python
|
tests/test_validator_plist.py
|
source-foundry/ufolint
|
88c744d7f8d45c62701c58f1a028f0283670571f
|
[
"MIT"
] | 22
|
2017-08-07T13:58:28.000Z
|
2021-11-21T17:01:01.000Z
|
tests/test_validator_plist.py
|
source-foundry/ufolint
|
88c744d7f8d45c62701c58f1a028f0283670571f
|
[
"MIT"
] | 119
|
2017-08-03T14:08:02.000Z
|
2022-03-23T06:04:33.000Z
|
tests/test_validator_plist.py
|
source-foundry/ufolint
|
88c744d7f8d45c62701c58f1a028f0283670571f
|
[
"MIT"
] | 4
|
2017-08-08T12:20:58.000Z
|
2020-11-25T14:38:27.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import pytest
from ufolint.validators import plistvalidators
from ufolint.data.ufo import Ufo2, Ufo3
# ///////////////////////////////////////////////////////
#
# CONSTANTS
#
# ///////////////////////////////////////////////////////
# Glyphs-directory lists passed to every validator: [layer name, directory name] pairs.
ufo2_dir_list = [['public.default', 'glyphs']]
ufo3_dir_list = [['public.default', 'glyphs'], ['glyphs.background', 'glyphs.public.background']]
# Known-good UFO sources used by the success-path tests.
ufo2_test_success_path = os.path.join('tests', 'testfiles', 'ufo', 'passes', 'UFO2-Pass.ufo')
ufo3_test_success_path = os.path.join('tests', 'testfiles', 'ufo', 'passes', 'UFO3-Pass.ufo')
# Directories of deliberately broken UFO sources, one per *.plist file under test.
metainfo_test_dir_failpath = os.path.join('tests', 'testfiles', 'ufo', 'fails', 'metainfoPL')
fontinfo_test_dir_failpath = os.path.join('tests', 'testfiles', 'ufo', 'fails', 'fontinfoPL')
groups_test_dir_failpath = os.path.join('tests', 'testfiles', 'ufo', 'fails', 'groupsPL')
kerning_test_dir_failpath = os.path.join('tests', 'testfiles', 'ufo', 'fails', 'kerningPL')
lib_test_dir_failpath = os.path.join('tests', 'testfiles', 'ufo', 'fails', 'libPL')
contents_test_dir_failpath = os.path.join('tests', 'testfiles', 'ufo', 'fails', 'contentsPL')
layercontents_test_dir_failpath = os.path.join('tests', 'testfiles', 'ufo', 'fails', 'layercontentsPL')
layerinfo_test_dir_failpath = os.path.join('tests', 'testfiles', 'ufo', 'fails', 'layerinfoPL')
# ///////////////////////////////////////////////////////
#
# Abstract plist validator super class tests
#
# ///////////////////////////////////////////////////////
def test_validators_plist_abstractplist_ufo2_instantiation():
    """AbstractPlistValidator stores its constructor arguments for a UFO v2 source."""
    validator = plistvalidators.AbstractPlistValidator(ufo2_test_success_path, 2, ufo2_dir_list)
    assert validator.ufopath == ufo2_test_success_path
    assert validator.ufoversion == 2
    assert validator.glyphs_dir_list == ufo2_dir_list
    assert validator.testfile is None
    assert isinstance(validator.ufoobj, Ufo2)
    assert validator.test_fail_list == []


def test_validators_plist_abstractplist_ufo3_instantiation():
    """AbstractPlistValidator stores its constructor arguments for a UFO v3 source."""
    validator = plistvalidators.AbstractPlistValidator(ufo3_test_success_path, 3, ufo3_dir_list)
    assert validator.ufopath == ufo3_test_success_path
    assert validator.ufoversion == 3
    assert validator.glyphs_dir_list == ufo3_dir_list
    assert validator.testfile is None
    assert isinstance(validator.ufoobj, Ufo3)
    assert validator.test_fail_list == []


def test_validators_plist_abstractplist_ufo3_unimplemented_ufolib_import_method():
    """The abstract run_ufolib_import_validation method raises NotImplementedError."""
    with pytest.raises(NotImplementedError):
        validator = plistvalidators.AbstractPlistValidator(ufo3_test_success_path, 3, ufo3_dir_list)
        validator.run_ufolib_import_validation()
# ///////////////////////////////////////////////////////
#
# metainfo.plist validator tests
#
# ///////////////////////////////////////////////////////
# Success tests
def test_validators_plist_ufo2_metainfo_successful_xml_ufolib_tests():
    """metainfo.plist XML and ufoLib validation pass on a valid UFO v2."""
    meta_validator = plistvalidators.MetainfoPlistValidator(ufo2_test_success_path, 2, ufo2_dir_list)
    xml_test_fails = meta_validator.run_xml_validation()
    ufolib_test_fails = meta_validator.run_ufolib_import_validation()
    assert isinstance(xml_test_fails, list)
    assert isinstance(ufolib_test_fails, list)
    assert len(xml_test_fails) == 0
    assert len(ufolib_test_fails) == 0


def test_validators_plist_ufo3_metainfo_successful_xml_ufolib_tests():
    """metainfo.plist XML and ufoLib validation pass on a valid UFO v3."""
    meta_validator = plistvalidators.MetainfoPlistValidator(ufo3_test_success_path, 3, ufo3_dir_list)
    xml_test_fails = meta_validator.run_xml_validation()
    ufolib_test_fails = meta_validator.run_ufolib_import_validation()
    assert isinstance(xml_test_fails, list)
    assert isinstance(ufolib_test_fails, list)
    assert len(xml_test_fails) == 0
    assert len(ufolib_test_fails) == 0


# Fail tests

def test_validators_plist_ufo2_metainfo_missing_file_fail(capsys):
    """A missing metainfo.plist exits with code 1 during the ufoLib import check.

    Fix: the original placed assertions after the SystemExit-raising call inside
    the pytest.raises block; they never executed and have been removed.
    """
    meta_missingfile_ufo_path = os.path.join(metainfo_test_dir_failpath, 'UFO2-MissingMeta.ufo')
    meta_validator = plistvalidators.MetainfoPlistValidator(meta_missingfile_ufo_path, 2, ufo2_dir_list)
    meta_validator.run_xml_validation()  # should not raise SystemExit
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        meta_validator.run_ufolib_import_validation()
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'UFO2-MissingMeta.ufo' in out
    assert 'metainfo.plist' in out


def test_validators_plist_ufo3_metainfo_missing_file_fail(capsys):
    """A missing metainfo.plist exits with code 1 during the ufoLib import check."""
    meta_missingfile_ufo_path = os.path.join(metainfo_test_dir_failpath, 'UFO3-MissingMeta.ufo')
    meta_validator = plistvalidators.MetainfoPlistValidator(meta_missingfile_ufo_path, 3, ufo3_dir_list)
    meta_validator.run_xml_validation()  # should not raise SystemExit
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        meta_validator.run_ufolib_import_validation()
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'UFO3-MissingMeta.ufo' in out
    assert 'metainfo.plist' in out


def test_validators_plist_ufo2_metainfo_xml_fail(capsys):
    """Malformed XML in metainfo.plist aborts the run with exit code 1.

    Fix: assertions on the (never-assigned) fail list inside the pytest.raises
    block were unreachable dead code and have been removed; the exit code and
    stdout checks below cover the observable behavior.
    """
    meta_ufo_path = os.path.join(metainfo_test_dir_failpath, 'UFO2-XMLmeta.ufo')
    meta_validator = plistvalidators.MetainfoPlistValidator(meta_ufo_path, 2, ufo2_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        meta_validator.run_xml_validation()
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'UFO2-XMLmeta.ufo' in out
    assert 'metainfo.plist' in out


def test_validators_plist_ufo3_metainfo_xml_fail(capsys):
    """Malformed XML in metainfo.plist aborts the run with exit code 1."""
    meta_ufo_path = os.path.join(metainfo_test_dir_failpath, 'UFO3-XMLmeta.ufo')
    meta_validator = plistvalidators.MetainfoPlistValidator(meta_ufo_path, 3, ufo3_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        meta_validator.run_xml_validation()
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'UFO3-XMLmeta.ufo' in out
    assert 'metainfo.plist' in out


def test_validators_plist_ufo2_metainfo_version_fail(capsys):
    """A UFO version mismatch in metainfo.plist aborts the run with exit code 1."""
    meta_ufo_path = os.path.join(metainfo_test_dir_failpath, 'UFO2-VersionFail.ufo')
    meta_validator = plistvalidators.MetainfoPlistValidator(meta_ufo_path, 2, ufo2_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        meta_validator.run_ufolib_import_validation()  # exits; return value unused
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'metainfo.plist' in out


def test_validators_plist_ufo3_metainfo_version_fail(capsys):
    """A UFO version mismatch in metainfo.plist aborts the run with exit code 1."""
    meta_ufo_path = os.path.join(metainfo_test_dir_failpath, 'UFO3-VersionFail.ufo')
    meta_validator = plistvalidators.MetainfoPlistValidator(meta_ufo_path, 3, ufo3_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        meta_validator.run_ufolib_import_validation()  # exits; return value unused
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'metainfo.plist' in out
# ///////////////////////////////////////////////////////
#
# fontinfo.plist validator tests
#
# ///////////////////////////////////////////////////////
# Success tests
def test_validators_plist_ufo2_fontinfo_success():
    """fontinfo.plist XML and ufoLib validation pass on a valid UFO v2."""
    validator = plistvalidators.FontinfoPlistValidator(ufo2_test_success_path, 2, ufo2_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo3_fontinfo_success():
    """fontinfo.plist XML and ufoLib validation pass on a valid UFO v3."""
    validator = plistvalidators.FontinfoPlistValidator(ufo3_test_success_path, 3, ufo3_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


# Fail tests

def test_validators_plist_ufo2_fontinfo_missing_file_fail(capsys):
    """A missing fontinfo.plist produces no validation failures (UFO v2)."""
    ufo_path = os.path.join(fontinfo_test_dir_failpath, 'UFO2-MissingFI.ufo')
    validator = plistvalidators.FontinfoPlistValidator(ufo_path, 2, ufo2_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo3_fontinfo_missing_file_fail(capsys):
    """A missing fontinfo.plist produces no validation failures (UFO v3)."""
    ufo_path = os.path.join(fontinfo_test_dir_failpath, 'UFO3-MissingFI.ufo')
    validator = plistvalidators.FontinfoPlistValidator(ufo_path, 3, ufo3_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo2_fontinfo_xml_fail(capsys):
    """Malformed XML in fontinfo.plist is reported as a single failure (UFO v2)."""
    ufo_path = os.path.join(fontinfo_test_dir_failpath, 'UFO2-XMLfi.ufo')
    validator = plistvalidators.FontinfoPlistValidator(ufo_path, 2, ufo2_dir_list)
    fail_list = validator.run_xml_validation()
    assert len(fail_list) == 1
    assert 'fontinfo.plist' in fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo3_fontinfo_xml_fail(capsys):
    """Malformed XML in fontinfo.plist is reported as a single failure (UFO v3)."""
    ufo_path = os.path.join(fontinfo_test_dir_failpath, 'UFO3-XMLfi.ufo')
    validator = plistvalidators.FontinfoPlistValidator(ufo_path, 3, ufo3_dir_list)
    fail_list = validator.run_xml_validation()
    assert len(fail_list) == 1
    assert 'fontinfo.plist' in fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo2_fontinfo_ufolib_import_fail(capsys):
    """A ufoLib import error in fontinfo.plist is reported as a single failure (UFO v2)."""
    ufo_path = os.path.join(fontinfo_test_dir_failpath, 'UFO2-UFOlibError.ufo')
    validator = plistvalidators.FontinfoPlistValidator(ufo_path, 2, ufo2_dir_list)
    fail_list = validator.run_ufolib_import_validation()
    assert len(fail_list) == 1
    assert 'fontinfo.plist' in fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo3_fontinfo_ufolib_import_fail(capsys):
    """A ufoLib import error in fontinfo.plist is reported as a single failure (UFO v3)."""
    ufo_path = os.path.join(fontinfo_test_dir_failpath, 'UFO3-UFOlibError.ufo')
    validator = plistvalidators.FontinfoPlistValidator(ufo_path, 3, ufo3_dir_list)
    fail_list = validator.run_ufolib_import_validation()
    assert len(fail_list) == 1
    assert 'fontinfo.plist' in fail_list[0].test_long_stdstream_string
# ///////////////////////////////////////////////////////
#
# groups.plist validator tests
#
# ///////////////////////////////////////////////////////
# Success tests
def test_validators_plist_ufo2_groups_success():
    """groups.plist XML and ufoLib validation pass on a valid UFO v2."""
    validator = plistvalidators.GroupsPlistValidator(ufo2_test_success_path, 2, ufo2_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo3_groups_success():
    """groups.plist XML and ufoLib validation pass on a valid UFO v3."""
    validator = plistvalidators.GroupsPlistValidator(ufo3_test_success_path, 3, ufo3_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


# Fail tests

def test_validators_plist_ufo2_groups_missing_file_fail():
    """A missing groups.plist produces no validation failures (UFO v2)."""
    ufo_path = os.path.join(groups_test_dir_failpath, 'UFO2-MissingGroups.ufo')
    validator = plistvalidators.GroupsPlistValidator(ufo_path, 2, ufo2_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo3_groups_missing_file_fail():
    """A missing groups.plist produces no validation failures (UFO v3)."""
    ufo_path = os.path.join(groups_test_dir_failpath, 'UFO3-MissingGroups.ufo')
    validator = plistvalidators.GroupsPlistValidator(ufo_path, 3, ufo3_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo2_groups_xml_fail():
    """Malformed XML in groups.plist is reported as a single failure (UFO v2)."""
    ufo_path = os.path.join(groups_test_dir_failpath, 'UFO2-XMLgr.ufo')
    validator = plistvalidators.GroupsPlistValidator(ufo_path, 2, ufo2_dir_list)
    fail_list = validator.run_xml_validation()
    assert len(fail_list) == 1
    assert 'groups.plist' in fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo3_groups_xml_fail():
    """Malformed XML in groups.plist is reported as a single failure (UFO v3)."""
    ufo_path = os.path.join(groups_test_dir_failpath, 'UFO3-XMLgr.ufo')
    validator = plistvalidators.GroupsPlistValidator(ufo_path, 3, ufo3_dir_list)
    fail_list = validator.run_xml_validation()
    assert len(fail_list) == 1
    assert 'groups.plist' in fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo2_groups_ufo_import_fail():
    """A ufoLib import error in groups.plist is reported as a single failure (UFO v2)."""
    ufo_path = os.path.join(groups_test_dir_failpath, 'UFO2-UFOlibError.ufo')
    validator = plistvalidators.GroupsPlistValidator(ufo_path, 2, ufo2_dir_list)
    fail_list = validator.run_ufolib_import_validation()
    assert len(fail_list) == 1
    assert 'groups.plist' in fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo3_groups_ufo_import_fail():
    """A ufoLib import error in groups.plist is reported as a single failure (UFO v3)."""
    ufo_path = os.path.join(groups_test_dir_failpath, 'UFO3-UFOlibError.ufo')
    validator = plistvalidators.GroupsPlistValidator(ufo_path, 3, ufo3_dir_list)
    fail_list = validator.run_ufolib_import_validation()
    assert len(fail_list) == 1
    assert 'groups.plist' in fail_list[0].test_long_stdstream_string
# ///////////////////////////////////////////////////////
#
# kerning.plist validator tests
#
# ///////////////////////////////////////////////////////
# Success tests
def test_validators_plist_ufo2_kerning_success():
    """kerning.plist XML and ufoLib validation pass on a valid UFO v2."""
    validator = plistvalidators.KerningPlistValidator(ufo2_test_success_path, 2, ufo2_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo3_kerning_success():
    """kerning.plist XML and ufoLib validation pass on a valid UFO v3."""
    validator = plistvalidators.KerningPlistValidator(ufo3_test_success_path, 3, ufo3_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


# Fail tests

def test_validators_plist_ufo2_kerning_missing_file_fail():
    """A missing kerning.plist produces no validation failures (UFO v2)."""
    ufo_path = os.path.join(kerning_test_dir_failpath, 'UFO2-MissingKern.ufo')
    validator = plistvalidators.KerningPlistValidator(ufo_path, 2, ufo2_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo3_kerning_missing_file_fail():
    """A missing kerning.plist produces no validation failures (UFO v3)."""
    ufo_path = os.path.join(kerning_test_dir_failpath, 'UFO3-MissingKern.ufo')
    validator = plistvalidators.KerningPlistValidator(ufo_path, 3, ufo3_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo2_kerning_xml_fail():
    """Malformed XML in kerning.plist is reported as a single failure (UFO v2)."""
    ufo_path = os.path.join(kerning_test_dir_failpath, 'UFO2-XMLkern.ufo')
    validator = plistvalidators.KerningPlistValidator(ufo_path, 2, ufo2_dir_list)
    xml_fail_list = validator.run_xml_validation()
    assert isinstance(xml_fail_list, list)
    assert len(xml_fail_list) == 1
    assert 'kerning.plist' in xml_fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo3_kerning_xml_fail():
    """Malformed XML in kerning.plist is reported as a single failure (UFO v3)."""
    ufo_path = os.path.join(kerning_test_dir_failpath, 'UFO3-XMLkern.ufo')
    validator = plistvalidators.KerningPlistValidator(ufo_path, 3, ufo3_dir_list)
    xml_fail_list = validator.run_xml_validation()
    assert isinstance(xml_fail_list, list)
    assert len(xml_fail_list) == 1
    assert 'kerning.plist' in xml_fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo2_kerning_ufo_import_fail():
    """A ufoLib import error in kerning.plist is reported as a single failure (UFO v2)."""
    ufo_path = os.path.join(kerning_test_dir_failpath, 'UFO2-UFOlibError.ufo')
    validator = plistvalidators.KerningPlistValidator(ufo_path, 2, ufo2_dir_list)
    fail_list = validator.run_ufolib_import_validation()
    assert isinstance(fail_list, list)
    assert len(fail_list) == 1
    assert 'kerning.plist' in fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo3_kerning_ufo_import_fail():
    """A ufoLib import error in kerning.plist is reported as a single failure (UFO v3)."""
    ufo_path = os.path.join(kerning_test_dir_failpath, 'UFO3-UFOlibError.ufo')
    validator = plistvalidators.KerningPlistValidator(ufo_path, 3, ufo3_dir_list)
    fail_list = validator.run_ufolib_import_validation()
    assert isinstance(fail_list, list)
    assert len(fail_list) == 1
    assert 'kerning.plist' in fail_list[0].test_long_stdstream_string
# ///////////////////////////////////////////////////////
#
# lib.plist validator tests
#
# ///////////////////////////////////////////////////////
# Success tests
def test_validators_plist_ufo2_lib_success():
    """lib.plist XML and ufoLib validation pass on a valid UFO v2."""
    validator = plistvalidators.LibPlistValidator(ufo2_test_success_path, 2, ufo2_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo3_lib_success():
    """lib.plist XML and ufoLib validation pass on a valid UFO v3."""
    validator = plistvalidators.LibPlistValidator(ufo3_test_success_path, 3, ufo3_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


# Fail tests

def test_validators_plist_ufo2_lib_missing_file_fail():
    """A missing lib.plist produces no validation failures (UFO v2)."""
    ufo_path = os.path.join(lib_test_dir_failpath, 'UFO2-MissingLib.ufo')
    validator = plistvalidators.LibPlistValidator(ufo_path, 2, ufo2_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo3_lib_missing_file_fail():
    """A missing lib.plist produces no validation failures (UFO v3)."""
    ufo_path = os.path.join(lib_test_dir_failpath, 'UFO3-MissingLib.ufo')
    validator = plistvalidators.LibPlistValidator(ufo_path, 3, ufo3_dir_list)
    xml_fail_list = validator.run_xml_validation()
    ufolib_fail_list = validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo2_lib_xml_fail():
    """Malformed XML in lib.plist is reported as a single failure (UFO v2)."""
    ufo_path = os.path.join(lib_test_dir_failpath, 'UFO2-XMLlib.ufo')
    validator = plistvalidators.LibPlistValidator(ufo_path, 2, ufo2_dir_list)
    fail_list = validator.run_xml_validation()
    assert isinstance(fail_list, list)
    assert len(fail_list) == 1
    assert 'lib.plist' in fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo3_lib_xml_fail():
    """Malformed XML in lib.plist is reported as a single failure (UFO v3)."""
    ufo_path = os.path.join(lib_test_dir_failpath, 'UFO3-XMLlib.ufo')
    validator = plistvalidators.LibPlistValidator(ufo_path, 3, ufo3_dir_list)
    fail_list = validator.run_xml_validation()
    assert isinstance(fail_list, list)
    assert len(fail_list) == 1
    assert 'lib.plist' in fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo2_lib_ufolib_import_fail():
    """A ufoLib import error in lib.plist is reported as a single failure (UFO v2)."""
    ufo_path = os.path.join(lib_test_dir_failpath, 'UFO2-UFOlibError.ufo')
    validator = plistvalidators.LibPlistValidator(ufo_path, 2, ufo2_dir_list)
    fail_list = validator.run_ufolib_import_validation()
    assert isinstance(fail_list, list)
    assert len(fail_list) == 1
    assert 'lib.plist' in fail_list[0].test_long_stdstream_string


def test_validators_plist_ufo3_lib_ufolib_import_fail():
    """A ufoLib import error in lib.plist is reported as a single failure (UFO v3)."""
    ufo_path = os.path.join(lib_test_dir_failpath, 'UFO3-UFOlibError.ufo')
    validator = plistvalidators.LibPlistValidator(ufo_path, 3, ufo3_dir_list)
    fail_list = validator.run_ufolib_import_validation()
    assert isinstance(fail_list, list)
    assert len(fail_list) == 1
    assert 'lib.plist' in fail_list[0].test_long_stdstream_string
# ///////////////////////////////////////////////////////
#
# contents.plist validator tests
#
# ///////////////////////////////////////////////////////
# Success tests
def test_validators_plist_ufo2_contents_success():
    """contents.plist XML and ufoLib validation pass on a valid UFO v2."""
    # Renamed local from the copy-pasted `lib_validator` for clarity.
    contents_validator = plistvalidators.ContentsPlistValidator(ufo2_test_success_path, 2, ufo2_dir_list)
    xml_fail_list = contents_validator.run_xml_validation()
    ufolib_fail_list = contents_validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


def test_validators_plist_ufo3_contents_success():
    """contents.plist XML and ufoLib validation pass on a valid UFO v3."""
    contents_validator = plistvalidators.ContentsPlistValidator(ufo3_test_success_path, 3, ufo3_dir_list)
    xml_fail_list = contents_validator.run_xml_validation()
    ufolib_fail_list = contents_validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert isinstance(ufolib_fail_list, list)
    assert len(xml_fail_list) == 0
    assert len(ufolib_fail_list) == 0


# Fail tests

def test_validators_plist_ufo2_contents_missing_file_fail(capsys):
    """A missing contents.plist is an XML failure and aborts the ufoLib import (UFO v2)."""
    contents_ufo_path = os.path.join(contents_test_dir_failpath, 'UFO2-MissingCont.ufo')
    contents_validator = plistvalidators.ContentsPlistValidator(contents_ufo_path, 2, ufo2_dir_list)
    xml_fail_list = contents_validator.run_xml_validation()
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        contents_validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert len(xml_fail_list) == 1
    out, _ = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'contents.plist is missing.' in out


def test_validators_plist_ufo3_contents_missing_file_fail(capsys):
    """A missing contents.plist is an XML failure and aborts the ufoLib import (UFO v3)."""
    contents_ufo_path = os.path.join(contents_test_dir_failpath, 'UFO3-MissingCont.ufo')
    contents_validator = plistvalidators.ContentsPlistValidator(contents_ufo_path, 3, ufo3_dir_list)
    xml_fail_list = contents_validator.run_xml_validation()
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        contents_validator.run_ufolib_import_validation()
    assert isinstance(xml_fail_list, list)
    assert len(xml_fail_list) == 1
    out, _ = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'contents.plist is missing.' in out


def test_validators_plist_ufo3_contents_unlisted_glifs_fail(capsys):
    """Glif files on disk that are not listed in contents.plist abort the run.

    BUG FIX: this test previously reused the name
    test_validators_plist_ufo3_contents_missing_file_fail, silently shadowing
    the definition above so pytest never collected that test. Renamed so both
    tests run.
    """
    contents_ufo_path = os.path.join(contents_test_dir_failpath, 'UFO3-UnlistedGlifs.ufo')
    contents_validator = plistvalidators.ContentsPlistValidator(contents_ufo_path, 3, ufo3_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        contents_validator.run_ufolib_import_validation()
    out, _ = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'rogue files not listed in contents.plist: a.001.glif' in out


def test_validators_plist_ufo2_contents_xml_fail(capsys):
    """Malformed XML in contents.plist aborts the run with exit code 1 (UFO v2).

    Fix: assertions on the (never-assigned) fail list inside the pytest.raises
    block were unreachable dead code and have been removed.
    """
    contents_ufo_path = os.path.join(contents_test_dir_failpath, 'UFO2-XMLcont.ufo')
    contents_validator = plistvalidators.ContentsPlistValidator(contents_ufo_path, 2, ufo2_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        contents_validator.run_xml_validation()
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'UFO2-XMLcont.ufo' in out
    assert 'contents.plist' in out


def test_validators_plist_ufo3_contents_xml_fail(capsys):
    """Malformed XML in contents.plist aborts the run with exit code 1 (UFO v3)."""
    contents_ufo_path = os.path.join(contents_test_dir_failpath, 'UFO3-XMLcont.ufo')
    contents_validator = plistvalidators.ContentsPlistValidator(contents_ufo_path, 3, ufo3_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        contents_validator.run_xml_validation()
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'UFO3-XMLcont.ufo' in out
    assert 'contents.plist' in out


def test_validators_plist_ufo2_contents_ufolib_import_fail(capsys):
    """A ufoLib import error for contents.plist aborts the run with exit code 1 (UFO v2)."""
    contents_ufo_path = os.path.join(contents_test_dir_failpath, 'UFO2-UFOlibError.ufo')
    contents_validator = plistvalidators.ContentsPlistValidator(contents_ufo_path, 2, ufo2_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        contents_validator.run_ufolib_import_validation()
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'UFO2-UFOlibError.ufo' in out
    assert 'contents.plist' in out


def test_validators_plist_ufo3_contents_ufolib_import_fail(capsys):
    """A ufoLib import error for contents.plist aborts the run with exit code 1 (UFO v3)."""
    contents_ufo_path = os.path.join(contents_test_dir_failpath, 'UFO3-UFOlibError.ufo')
    contents_validator = plistvalidators.ContentsPlistValidator(contents_ufo_path, 3, ufo3_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        contents_validator.run_ufolib_import_validation()
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    assert 'UFO3-UFOlibError.ufo' in out
    assert 'contents.plist' in out
# ///////////////////////////////////////////////////////
#
# layercontents.plist validator tests
#
# ///////////////////////////////////////////////////////
# Success tests
def test_validators_plist_ufo2_layercontents_success():
    """layercontents.plist is not part of the UFO v2 spec, so a v2 source
    without it must produce no validation failures."""
    validator = plistvalidators.LayercontentsPlistValidator(ufo2_test_success_path, 2, ufo2_dir_list)
    for failures in (validator.run_xml_validation(), validator.run_ufolib_import_validation()):
        assert isinstance(failures, list)
        assert failures == []
def test_validators_plist_ufo3_layercontents_success():
    """A well-formed UFO v3 source (layercontents.plist is UFO 3+ only) passes
    both XML and ufoLib-import validation with no failures."""
    validator = plistvalidators.LayercontentsPlistValidator(ufo3_test_success_path, 3, ufo3_dir_list)
    for failures in (validator.run_xml_validation(), validator.run_ufolib_import_validation()):
        assert isinstance(failures, list)
        assert failures == []
# Fail tests
def test_validators_plist_ufo2_layercontents_missing_file_fail():
    """A UFO v2 source missing layercontents.plist must not fail: the file is
    not part of the v2 spec."""
    ufo_path = os.path.join(layercontents_test_dir_failpath, 'UFO2-MissingLC.ufo')
    validator = plistvalidators.LayercontentsPlistValidator(ufo_path, 2, ufo2_dir_list)
    for failures in (validator.run_xml_validation(), validator.run_ufolib_import_validation()):
        assert isinstance(failures, list)
        assert failures == []
def test_validators_plist_ufo3_layercontents_missing_file_fail():
    """A UFO v3 source missing layercontents.plist does not fail here:
    the missing-mandatory-file check lives in the runner.py module."""
    ufo_path = os.path.join(layercontents_test_dir_failpath, 'UFO3-MissingLC.ufo')
    validator = plistvalidators.LayercontentsPlistValidator(ufo_path, 3, ufo3_dir_list)
    for failures in (validator.run_xml_validation(), validator.run_ufolib_import_validation()):
        assert isinstance(failures, list)
        assert failures == []
def test_validators_plist_ufo3_layercontents_xml_fail(capsys):
    """Malformed XML in a UFO v3 layercontents.plist exits with status code 1."""
    lc_ufo_path = os.path.join(layercontents_test_dir_failpath, 'UFO3-XMLlc.ufo')
    lc_validator = plistvalidators.LayercontentsPlistValidator(lc_ufo_path, 3, ufo3_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        # This call raises SystemExit for this fixture, so nothing after it in
        # the `with` block executes.  The original test asserted on the return
        # value here; those assertions were dead code and have been removed.
        lc_validator.run_xml_validation()
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    # the validator reports the offending UFO source and plist file on stdout
    assert 'UFO3-XMLlc.ufo' in out
    assert 'layercontents.plist' in out
def test_validators_plist_ufo3_layercontents_ufolib_import_fail(capsys):
    """A UFO v3 layercontents.plist that ufoLib cannot import exits with status code 1."""
    lc_ufo_path = os.path.join(layercontents_test_dir_failpath, 'UFO3-UFOlibError.ufo')
    lc_validator = plistvalidators.LayercontentsPlistValidator(lc_ufo_path, 3, ufo3_dir_list)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        # This call raises SystemExit for this fixture, so nothing after it in
        # the `with` block executes.  The original test asserted on the return
        # value here; those assertions were dead code and have been removed.
        lc_validator.run_ufolib_import_validation()
    out, err = capsys.readouterr()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 1
    # the validator reports the offending UFO source and plist file on stdout
    assert 'UFO3-UFOlibError.ufo' in out
    assert 'layercontents.plist' in out
# ///////////////////////////////////////////////////////
#
# layerinfo.plist validator tests
#
# ///////////////////////////////////////////////////////
# Success tests
def test_validators_plist_ufo2_layerinfo_success():
    """layerinfo.plist is absent from the UFO v2 test directory; since the file
    is not part of the v2 spec, validation must report no failures."""
    validator = plistvalidators.LayerinfoPlistValidator(ufo2_test_success_path, 2, ufo2_dir_list)
    for failures in (validator.run_xml_validation(), validator.run_ufolib_import_validation()):
        assert isinstance(failures, list)
        assert failures == []
def test_validators_plist_ufo3_layerinfo_success():
    """A well-formed UFO v3 layerinfo.plist passes both validation passes."""
    validator = plistvalidators.LayerinfoPlistValidator(ufo3_test_success_path, 3, ufo3_dir_list)
    for failures in (validator.run_xml_validation(), validator.run_ufolib_import_validation()):
        assert isinstance(failures, list)
        assert failures == []
# Fail tests
def test_validators_plist_ufo3_layerinfo_missing_file_fail():
    """A UFO v3 source missing layerinfo.plist produces no validator failures
    (the file is optional at this layer of checking)."""
    ufo_path = os.path.join(layerinfo_test_dir_failpath, 'UFO3-MissingLI.ufo')
    validator = plistvalidators.LayerinfoPlistValidator(ufo_path, 3, ufo3_dir_list)
    for failures in (validator.run_xml_validation(), validator.run_ufolib_import_validation()):
        assert isinstance(failures, list)
        assert failures == []
def test_validators_plist_ufo3_layerinfo_xml_fail():
    """Malformed XML in layerinfo.plist yields exactly one failure record
    naming the offending file."""
    ufo_path = os.path.join(layerinfo_test_dir_failpath, 'UFO3-XMLli.ufo')
    validator = plistvalidators.LayerinfoPlistValidator(ufo_path, 3, ufo3_dir_list)
    failures = validator.run_xml_validation()
    assert isinstance(failures, list)
    assert len(failures) == 1
    assert 'layerinfo.plist' in failures[0].test_long_stdstream_string
def test_validators_plist_ufo3_layerinfo_ufolib_import_fail():
    """A layerinfo.plist that ufoLib cannot import yields exactly one failure
    record naming the offending file."""
    ufo_path = os.path.join(layerinfo_test_dir_failpath, 'UFO3-UFOLibError.ufo')
    validator = plistvalidators.LayerinfoPlistValidator(ufo_path, 3, ufo3_dir_list)
    failures = validator.run_ufolib_import_validation()
    assert isinstance(failures, list)
    assert len(failures) == 1
    assert 'layerinfo.plist' in failures[0].test_long_stdstream_string
| 38.038168
| 105
| 0.75577
| 4,604
| 34,881
| 5.306473
| 0.036056
| 0.081208
| 0.045025
| 0.056731
| 0.946257
| 0.938398
| 0.907904
| 0.904793
| 0.897057
| 0.86951
| 0
| 0.013594
| 0.133196
| 34,881
| 916
| 106
| 38.079694
| 0.794444
| 0.060119
| 0
| 0.70073
| 0
| 0
| 0.061939
| 0.002758
| 0
| 0
| 0
| 0
| 0.436131
| 1
| 0.114964
| false
| 0.00365
| 0.116788
| 0
| 0.231752
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
14f13ad58623a4f81601cf098e0c300b2bd40fcc
| 7,429
|
py
|
Python
|
tests/test_widgets.py
|
zztin/ctimer
|
a0721bb9791b23305ee4ff4a2eb30a1f4d8c80cc
|
[
"Apache-2.0"
] | null | null | null |
tests/test_widgets.py
|
zztin/ctimer
|
a0721bb9791b23305ee4ff4a2eb30a1f4d8c80cc
|
[
"Apache-2.0"
] | 35
|
2020-06-07T12:40:43.000Z
|
2021-06-03T19:56:01.000Z
|
tests/test_widgets.py
|
zztin/ctimer
|
a0721bb9791b23305ee4ff4a2eb30a1f4d8c80cc
|
[
"Apache-2.0"
] | 1
|
2020-06-04T05:31:03.000Z
|
2020-06-04T05:31:03.000Z
|
"""Tests for `model` module of `ctimer` package."""
import pytest
import tkinter as tk
from unittest.mock import MagicMock
from unittest.mock import patch
from ctimer import utils
from ctimer import ctimer_db as db
from ctimer.controller import CtimerClockController
from ctimer.view import CtimerClockView
from ctimer.model import CtimerClockModel
import os
@pytest.fixture
def controller():
    """Yield a CtimerClockController wired to a fresh debug database and a
    visible Tk root; destroy the Tk root on teardown.

    The debug db file path was previously rebuilt with the same f-string in
    three places; it is now computed once.
    """
    print("Setup debugging-controller for testing.")
    db_path_debug = utils.get_cache_filepath(None, debug=True)
    db_file_debug = f"{db_path_debug}/ctimer_debug_temp.db"
    # start from an empty db file so clock counts begin at zero
    if os.path.exists(db_file_debug):
        os.remove(db_file_debug)
    db.create_connection(db_file_debug)
    current_clock_details = db.Clock_details(db_file_debug)
    tk_root = tk.Tk()
    tk_root.wait_visibility()
    ccc = CtimerClockController(
        db_file_debug, current_clock_details, False, True, True, None, tk_root
    )
    yield ccc
    print("Teardown the debugging-controller for testing...")
    ccc.master.destroy()
    print("Teardown the debugging-controller for testing. Done.")
def test_init(controller):
    """A freshly constructed controller exposes the expected view/model pair
    with default (not-yet-reached) clock details."""
    tv = controller.tv
    tm = controller.tm
    # isinstance instead of `type(x) is` — accepts subclasses and is idiomatic
    assert isinstance(tv, CtimerClockView)
    assert isinstance(tm, CtimerClockModel)
    # `assert not flag` instead of `flag == False`
    assert not tm.clock_details.reached_bool
    assert tm.clock_details.reason == "N.A."
def test_click_start_reached_one_clock(controller):
    """Drive one full clock via the start button; it must be recorded as reached."""
    expected_goal_string = "test_click_start fake goal for testing"
    tv = controller.tv
    tm = controller.tm
    assert not controller.tm.clock_ticking
    # patch the modal dialogs so the test runs headless: the goal prompt and
    # the reached-goal prompt both return canned answers
    with \
            patch('ctimer.view.simpledialog.askstring', MagicMock(return_value=expected_goal_string)), \
            patch('ctimer.view.CtimerClockView.ask_reached_goal_reason',
                  MagicMock(return_value=(True, "Fake reached reason"))):
        assert tm.clock_details.clock_count == 0
        tv._button_start_pause.invoke()
        controller.master.update()
        assert tm.remaining_time > 1
        one_clock_time = tm.remaining_time
        assert tm.clock_details.task_description == expected_goal_string
        assert controller.tm.clock_ticking
        # tick down until one second remains
        for ticking in range(one_clock_time - 1):
            controller.countdown()
            controller.master.update()
        assert tm.remaining_time == 1
        controller.countdown()
        controller.master.update()
        assert tm.remaining_time == 0
        # one extra tick past zero finalizes the clock
        controller.countdown()
        controller.master.update()
        assert tm.clock_details.reached_bool
        assert tm.clock_details.reason == "Fake reached reason"
        assert tm.clock_details.clock_count == 1
    # clean up the debug db created by the fixture
    db_path_debug = utils.get_cache_filepath(None, debug=True)
    os.remove(f"{db_path_debug}/ctimer_debug_temp.db")
def test_click_start_reached_two_clock(controller):
    """Complete one clock, its break, then a second clock; clock_count must reach 2."""
    expected_goal_string = "test_click_start fake goal for testing"
    tv = controller.tv
    tm = controller.tm
    assert not controller.tm.clock_ticking
    # patch the modal dialogs so the test runs headless
    with \
            patch('ctimer.view.simpledialog.askstring', MagicMock(return_value=expected_goal_string)), \
            patch('ctimer.view.CtimerClockView.ask_reached_goal_reason',
                  MagicMock(return_value=(True, "Fake reached reason"))):
        assert tm.clock_details.clock_count == 0
        tv._button_start_pause.invoke()
        controller.master.update()
        one_clock_time = tm.remaining_time
        # first clock: tick to zero, one extra tick finalizes it
        for ticking in range(one_clock_time - 1):
            controller.countdown()
            controller.master.update()
        assert tm.remaining_time == 1
        controller.countdown()
        controller.master.update()
        assert tm.remaining_time == 0
        controller.countdown()
        controller.master.update()
        assert tm.clock_details.reached_bool
        assert tm.clock_details.reason == "Fake reached reason"
        assert tm.clock_details.clock_count == 1
        controller.master.update()
        # A clock ended, starts break
        assert tm.clock_ticking
        for ticking in range(one_clock_time - 1):
            controller.countdown()
            controller.master.update()
        assert tm.remaining_time == 1
        controller.countdown()
        controller.master.update()
        assert tm.remaining_time == 0
        controller.countdown()
        controller.master.update()
        # Break ends, stop ticking
        assert not tm.clock_ticking
        assert tm.clock_details.clock_count == 1
        # second clock: start again and run it to completion
        tv._button_start_pause.invoke()
        controller.master.update()
        one_clock_time = tm.remaining_time
        for ticking in range(one_clock_time - 1):
            controller.countdown()
            controller.master.update()
        assert tm.remaining_time == 1
        controller.countdown()
        controller.master.update()
        assert tm.remaining_time == 0
        controller.countdown()
        controller.master.update()
        assert tm.clock_details.clock_count == 2
# TODO(review): this should be moved to test_model
def test_is_complete(controller):
    """After two completed clocks (with a pause/resume toggle mid-first-clock),
    the session must report is_complete and no lingering pause toggle."""
    expected_goal_string = "test_click_start fake goal for testing"
    tv = controller.tv
    tm = controller.tm
    assert not controller.tm.clock_ticking
    # patch the modal dialogs so the test runs headless
    with \
            patch('ctimer.view.simpledialog.askstring', MagicMock(return_value=expected_goal_string)), \
            patch('ctimer.view.CtimerClockView.ask_reached_goal_reason',
                  MagicMock(return_value=(True, "Fake reached reason"))):
        assert tm.clock_details.clock_count == 0
        tv._button_start_pause.invoke()
        controller.master.update()
        one_clock_time = tm.remaining_time
        for ticking in range(one_clock_time - 1):
            controller.countdown()
            controller.master.update()
        # pause then immediately resume; session must not count as complete yet
        tv._button_start_pause.invoke()
        tv._button_start_pause.invoke()
        assert not tm.clock_details.is_complete
        assert tm.remaining_time == 1
        controller.countdown()
        controller.master.update()
        assert tm.remaining_time == 0
        controller.countdown()
        controller.master.update()
        assert tm.clock_details.reached_bool
        assert tm.clock_details.reason == "Fake reached reason"
        assert tm.clock_details.clock_count == 1
        controller.master.update()
        # A clock ended, starts break
        assert tm.clock_ticking
        for ticking in range(one_clock_time - 1):
            controller.countdown()
            controller.master.update()
        assert tm.remaining_time == 1
        controller.countdown()
        controller.master.update()
        assert tm.remaining_time == 0
        controller.countdown()
        controller.master.update()
        # Break ends, stop ticking
        assert not tm.clock_ticking
        assert tm.clock_details.clock_count == 1
        # second clock: start again and run it to completion
        tv._button_start_pause.invoke()
        controller.master.update()
        one_clock_time = tm.remaining_time
        for ticking in range(one_clock_time - 1):
            controller.countdown()
            controller.master.update()
        assert tm.remaining_time == 1
        controller.countdown()
        controller.master.update()
        assert tm.remaining_time == 0
        controller.countdown()
        controller.master.update()
        assert tm.clock_details.clock_count == 2
        assert not tm.clock_details.pause_toggled
        assert tm.clock_details.is_complete
| 35.716346
| 104
| 0.676403
| 900
| 7,429
| 5.355556
| 0.13
| 0.061411
| 0.127801
| 0.15249
| 0.840041
| 0.798133
| 0.773651
| 0.747925
| 0.737344
| 0.730498
| 0
| 0.005644
| 0.236775
| 7,429
| 207
| 105
| 35.888889
| 0.844444
| 0.028941
| 0
| 0.767442
| 0
| 0
| 0.1069
| 0.055394
| 0
| 0
| 0
| 0
| 0.273256
| 1
| 0.02907
| false
| 0
| 0.05814
| 0
| 0.087209
| 0.017442
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
091e1531318761e7baa41d74240b9d7f74539b37
| 10,397
|
py
|
Python
|
src/outdoor_bot/srv/_digcams_service.py
|
dan-git/outdoor_bot
|
81bf75e26449f8e4b6a38f4049ca4d4cda7b8c04
|
[
"BSD-2-Clause"
] | null | null | null |
src/outdoor_bot/srv/_digcams_service.py
|
dan-git/outdoor_bot
|
81bf75e26449f8e4b6a38f4049ca4d4cda7b8c04
|
[
"BSD-2-Clause"
] | null | null | null |
src/outdoor_bot/srv/_digcams_service.py
|
dan-git/outdoor_bot
|
81bf75e26449f8e4b6a38f4049ca4d4cda7b8c04
|
[
"BSD-2-Clause"
] | null | null | null |
"""autogenerated by genpy from outdoor_bot/digcams_serviceRequest.msg. Do not edit."""
import sys
# True when running under Python 3 (hexversion >= 0x03000000)
python3 = sys.hexversion > 0x03000000
import genpy
import struct
class digcams_serviceRequest(genpy.Message):
  """Autogenerated request message for the outdoor_bot/digcams_service ROS service.

  Wire layout: two length-prefixed strings (command, filename) followed by a
  fixed 9-byte little-endian tail '<ifB': int32 cameraName, float32 zoom,
  uint8 write_file.
  """
  _md5sum = "e66469e4a859b3bbec7c3e464d8de5f3"
  _type = "outdoor_bot/digcams_serviceRequest"
  _has_header = False  # flag to mark the presence of a Header object
  _full_text = """string command
string filename
int32 cameraName
float32 zoom
bool write_file
"""
  __slots__ = ['command','filename','cameraName','zoom','write_file']
  _slot_types = ['string','string','int32','float32','bool']

  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       command, filename, cameraName, zoom, write_file

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
        to set specific fields.
    """
    if args or kwds:
      super(digcams_serviceRequest, self).__init__(*args, **kwds)
      # message fields cannot be None, assign default values for those that are
      if self.command is None:
        self.command = ''
      if self.filename is None:
        self.filename = ''
      if self.cameraName is None:
        self.cameraName = 0
      if self.zoom is None:
        self.zoom = 0.
      if self.write_file is None:
        self.write_file = False
    else:
      self.command = ''
      self.filename = ''
      self.cameraName = 0
      self.zoom = 0.
      self.write_file = False

  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types

  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      # command: 4-byte length prefix then the (utf-8 encoded) payload
      _x = self.command
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      # filename: same length-prefixed string encoding
      _x = self.filename
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      # fixed-width tail: int32 cameraName, float32 zoom, uint8 write_file
      _x = self
      buff.write(_struct_ifB.pack(_x.cameraName, _x.zoom, _x.write_file))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))

  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      end = 0
      # command: 4-byte length prefix then the payload bytes
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.command = str[start:end].decode('utf-8')
      else:
        self.command = str[start:end]
      # filename: same length-prefixed encoding
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.filename = str[start:end].decode('utf-8')
      else:
        self.filename = str[start:end]
      # fixed 9-byte tail: '<ifB'
      _x = self
      start = end
      end += 9
      (_x.cameraName, _x.zoom, _x.write_file,) = _struct_ifB.unpack(str[start:end])
      # canonicalize the unpacked uint8 into a real bool
      self.write_file = bool(self.write_file)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e)  # most likely buffer underfill

  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      _x = self.command
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self.filename
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_struct_ifB.pack(_x.cameraName, _x.zoom, _x.write_file))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))

  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      end = 0
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.command = str[start:end].decode('utf-8')
      else:
        self.command = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.filename = str[start:end].decode('utf-8')
      else:
        self.filename = str[start:end]
      _x = self
      start = end
      end += 9
      (_x.cameraName, _x.zoom, _x.write_file,) = _struct_ifB.unpack(str[start:end])
      self.write_file = bool(self.write_file)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e)  # most likely buffer underfill
# precompiled Struct objects shared by the (de)serialization methods above
_struct_I = genpy.struct_I
_struct_ifB = struct.Struct("<ifB")
"""autogenerated by genpy from outdoor_bot/digcams_serviceResponse.msg. Do not edit."""
import sys
# True when running under Python 3 (hexversion >= 0x03000000)
python3 = sys.hexversion > 0x03000000
import genpy
import struct
class digcams_serviceResponse(genpy.Message):
  """Autogenerated response message for the outdoor_bot/digcams_service ROS service.

  Wire layout: fixed 6-byte little-endian record '<i2B': int32 captureResult,
  uint8 zoomResult, uint8 writeFileResult.
  """
  _md5sum = "6ed4b5a638436ae25b49ac666a6a3877"
  _type = "outdoor_bot/digcams_serviceResponse"
  _has_header = False  # flag to mark the presence of a Header object
  _full_text = """int32 captureResult
bool zoomResult
bool writeFileResult
"""
  __slots__ = ['captureResult','zoomResult','writeFileResult']
  _slot_types = ['int32','bool','bool']

  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       captureResult, zoomResult, writeFileResult

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
        to set specific fields.
    """
    if args or kwds:
      super(digcams_serviceResponse, self).__init__(*args, **kwds)
      # message fields cannot be None, assign default values for those that are
      if self.captureResult is None:
        self.captureResult = 0
      if self.zoomResult is None:
        self.zoomResult = False
      if self.writeFileResult is None:
        self.writeFileResult = False
    else:
      self.captureResult = 0
      self.zoomResult = False
      self.writeFileResult = False

  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types

  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      # fixed record: int32 captureResult, uint8 zoomResult, uint8 writeFileResult
      _x = self
      buff.write(_struct_i2B.pack(_x.captureResult, _x.zoomResult, _x.writeFileResult))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))

  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      end = 0
      # fixed 6-byte record: '<i2B'
      _x = self
      start = end
      end += 6
      (_x.captureResult, _x.zoomResult, _x.writeFileResult,) = _struct_i2B.unpack(str[start:end])
      # canonicalize the unpacked uint8 fields into real bools
      self.zoomResult = bool(self.zoomResult)
      self.writeFileResult = bool(self.writeFileResult)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e)  # most likely buffer underfill

  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      _x = self
      buff.write(_struct_i2B.pack(_x.captureResult, _x.zoomResult, _x.writeFileResult))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))

  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      end = 0
      _x = self
      start = end
      end += 6
      (_x.captureResult, _x.zoomResult, _x.writeFileResult,) = _struct_i2B.unpack(str[start:end])
      self.zoomResult = bool(self.zoomResult)
      self.writeFileResult = bool(self.writeFileResult)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e)  # most likely buffer underfill
# precompiled Struct objects shared by the (de)serialization methods above
_struct_I = genpy.struct_I
_struct_i2B = struct.Struct("<i2B")
class digcams_service(object):
  """Service stub pairing the request and response classes for ROS service registration."""
  _type = 'outdoor_bot/digcams_service'
  _md5sum = '40caf086f1c912d66f4fd74977f57548'
  _request_class  = digcams_serviceRequest
  _response_class = digcams_serviceResponse
| 33.217252
| 123
| 0.646148
| 1,361
| 10,397
| 4.781043
| 0.124173
| 0.034424
| 0.027048
| 0.014753
| 0.828339
| 0.828339
| 0.816659
| 0.804057
| 0.804057
| 0.804057
| 0
| 0.01765
| 0.237088
| 10,397
| 312
| 124
| 33.323718
| 0.802698
| 0.22872
| 0
| 0.83871
| 1
| 0
| 0.097417
| 0.025174
| 0
| 0
| 0.002622
| 0
| 0
| 1
| 0.0553
| false
| 0
| 0.02765
| 0
| 0.198157
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
096376e5c1631c740d9335518b723d665572b974
| 42,393
|
py
|
Python
|
main_window_ui.py
|
junglefive/MacAddressCollector
|
0bf3ce51371ab0f8725c07c7a6c9512cc5ebc90d
|
[
"MIT"
] | null | null | null |
main_window_ui.py
|
junglefive/MacAddressCollector
|
0bf3ce51371ab0f8725c07c7a6c9512cc5ebc90d
|
[
"MIT"
] | null | null | null |
main_window_ui.py
|
junglefive/MacAddressCollector
|
0bf3ce51371ab0f8725c07c7a6c9512cc5ebc90d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main_window.ui'
#
# Created by: PyQt5 UI code generator 5.9
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
    """Create and lay out every widget of the main window.

    Originally emitted by pyuic5 from 'main_window.ui'.  The generator's
    highly repetitive per-brush palette code (one QBrush + setStyle +
    setBrush triple per color group and role) is folded into the local
    _apply_palette helper, which installs identical QPalette state.

    :param MainWindow: the QMainWindow instance to populate.
    """
    P = QtGui.QPalette  # shorthand for palette color groups and roles

    def _apply_palette(widget, normal, disabled):
        # Build a QPalette whose Active and Inactive groups share the
        # *normal* {role: (r, g, b)} mapping while the Disabled group uses
        # *disabled*, then install it on *widget*.  All brushes are solid,
        # matching the generator's brush.setStyle(Qt.SolidPattern) calls.
        palette = P()
        for group, colors in ((P.Active, normal),
                              (P.Inactive, normal),
                              (P.Disabled, disabled)):
            for role, rgb in colors.items():
                brush = QtGui.QBrush(QtGui.QColor(*rgb))
                brush.setStyle(QtCore.Qt.SolidPattern)
                palette.setBrush(group, role, brush)
        widget.setPalette(palette)

    # ---- top-level window properties -----------------------------------
    MainWindow.setObjectName("MainWindow")
    MainWindow.setEnabled(True)
    MainWindow.resize(678, 759)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred,
                                       QtWidgets.QSizePolicy.Preferred)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
    MainWindow.setSizePolicy(sizePolicy)
    MainWindow.setMinimumSize(QtCore.QSize(200, 400))
    MainWindow.setMaximumSize(QtCore.QSize(16777215, 16777215))

    # Warm beige scheme for the window frame.
    beige = {
        P.WindowText: (0, 0, 0), P.Button: (240, 236, 233),
        P.Light: (255, 255, 255), P.Midlight: (247, 245, 244),
        P.Dark: (120, 118, 116), P.Mid: (160, 157, 155),
        P.Text: (0, 0, 0), P.BrightText: (255, 255, 255),
        P.ButtonText: (0, 0, 0), P.Base: (255, 255, 255),
        P.Window: (240, 236, 233), P.Shadow: (0, 0, 0),
        P.AlternateBase: (247, 245, 244),
        P.ToolTipBase: (255, 255, 220), P.ToolTipText: (0, 0, 0),
    }
    _apply_palette(MainWindow, beige, {
        **beige,
        P.WindowText: (120, 118, 116), P.Text: (120, 118, 116),
        P.ButtonText: (120, 118, 116), P.Base: (240, 236, 233),
        P.AlternateBase: (240, 236, 233),
    })
    MainWindow.setTabletTracking(False)
    MainWindow.setLayoutDirection(QtCore.Qt.LeftToRight)

    # ---- central widget and outer layouts ------------------------------
    self.centralwidget = QtWidgets.QWidget(MainWindow)
    # Neutral gray scheme for the central container.
    gray = {
        P.WindowText: (0, 0, 0), P.Button: (240, 240, 240),
        P.Light: (255, 255, 255), P.Midlight: (247, 247, 247),
        P.Dark: (120, 120, 120), P.Mid: (160, 160, 160),
        P.Text: (0, 0, 0), P.BrightText: (255, 255, 255),
        P.ButtonText: (0, 0, 0), P.Base: (255, 255, 255),
        P.Window: (240, 240, 240), P.Shadow: (0, 0, 0),
        P.AlternateBase: (247, 247, 247),
        P.ToolTipBase: (255, 255, 220), P.ToolTipText: (0, 0, 0),
    }
    _apply_palette(self.centralwidget, gray, {
        **gray,
        P.WindowText: (120, 120, 120), P.Text: (120, 120, 120),
        P.ButtonText: (120, 120, 120), P.Base: (240, 240, 240),
        P.AlternateBase: (240, 240, 240),
    })
    self.centralwidget.setObjectName("centralwidget")
    self.horizontalLayout = QtWidgets.QHBoxLayout(self.centralwidget)
    self.horizontalLayout.setContentsMargins(-1, -1, -1, 0)
    self.horizontalLayout.setSpacing(0)
    self.horizontalLayout.setObjectName("horizontalLayout")
    self.gridLayout = QtWidgets.QGridLayout()
    self.gridLayout.setSpacing(0)
    self.gridLayout.setObjectName("gridLayout")

    # ---- tab widget ----------------------------------------------------
    self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
    self.tabWidget.setEnabled(True)
    self.tabWidget.setBaseSize(QtCore.QSize(480, 590))
    # All-white scheme for the tab pages.
    white = {
        P.WindowText: (0, 0, 0), P.Button: (255, 255, 255),
        P.Light: (255, 255, 255), P.Midlight: (255, 255, 255),
        P.Dark: (127, 127, 127), P.Mid: (170, 170, 170),
        P.Text: (0, 0, 0), P.BrightText: (255, 255, 255),
        P.ButtonText: (0, 0, 0), P.Base: (255, 255, 255),
        P.Window: (255, 255, 255), P.Shadow: (0, 0, 0),
        P.AlternateBase: (255, 255, 255),
        P.ToolTipBase: (255, 255, 220), P.ToolTipText: (0, 0, 0),
    }
    _apply_palette(self.tabWidget, white, {
        **white,
        P.WindowText: (127, 127, 127), P.Text: (127, 127, 127),
        P.ButtonText: (127, 127, 127),
    })
    font = QtGui.QFont()
    font.setFamily("Microsoft YaHei")
    self.tabWidget.setFont(font)
    self.tabWidget.setAutoFillBackground(False)
    self.tabWidget.setUsesScrollButtons(True)
    self.tabWidget.setMovable(True)
    self.tabWidget.setObjectName("tabWidget")

    # ---- first tab (self.tab): buttons and export progress -------------
    self.tab = QtWidgets.QWidget()
    self.tab.setObjectName("tab")
    self.gridLayout_3 = QtWidgets.QGridLayout(self.tab)
    self.gridLayout_3.setObjectName("gridLayout_3")
    self.btn_export_excle = QtWidgets.QPushButton(self.tab)
    font = QtGui.QFont()
    font.setFamily("Microsoft YaHei")
    font.setPointSize(24)
    self.btn_export_excle.setFont(font)
    self.btn_export_excle.setObjectName("btn_export_excle")
    self.gridLayout_3.addWidget(self.btn_export_excle, 4, 0, 1, 1)
    self.btn_save_log = QtWidgets.QPushButton(self.tab)
    font = QtGui.QFont()
    font.setPointSize(24)  # default family here, unlike the export button
    self.btn_save_log.setFont(font)
    self.btn_save_log.setObjectName("btn_save_log")
    self.gridLayout_3.addWidget(self.btn_save_log, 1, 0, 1, 1)
    self.progressBar_xls = QtWidgets.QProgressBar(self.tab)
    self.progressBar_xls.setProperty("value", 0)
    self.progressBar_xls.setObjectName("progressBar_xls")
    self.gridLayout_3.addWidget(self.progressBar_xls, 3, 0, 1, 1)
    self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
    self.horizontalLayout_3.setObjectName("horizontalLayout_3")
    # Voice-control buttons start disabled; presumably enabled at runtime
    # by application code outside this generated module.
    self.btn_change_voice = QtWidgets.QPushButton(self.tab)
    self.btn_change_voice.setEnabled(False)
    self.btn_change_voice.setObjectName("btn_change_voice")
    self.horizontalLayout_3.addWidget(self.btn_change_voice)
    self.btn_close_voice = QtWidgets.QPushButton(self.tab)
    self.btn_close_voice.setEnabled(False)
    self.btn_close_voice.setObjectName("btn_close_voice")
    self.horizontalLayout_3.addWidget(self.btn_close_voice)
    self.gridLayout_3.addLayout(self.horizontalLayout_3, 2, 0, 1, 1)
    self.tabWidget.addTab(self.tab, "")

    # ---- second tab (tab_watching): counter, detect button, log --------
    self.tab_watching = QtWidgets.QWidget()
    self.tab_watching.setObjectName("tab_watching")
    self.gridLayout_2 = QtWidgets.QGridLayout(self.tab_watching)
    self.gridLayout_2.setObjectName("gridLayout_2")
    self.label = QtWidgets.QLabel(self.tab_watching)
    font = QtGui.QFont()
    font.setFamily("Microsoft YaHei")
    self.label.setFont(font)
    self.label.setText("")
    self.label.setObjectName("label")
    self.gridLayout_2.addWidget(self.label, 0, 1, 1, 1)
    self.verticalLayout = QtWidgets.QVBoxLayout()
    self.verticalLayout.setContentsMargins(-1, -1, -1, 20)
    self.verticalLayout.setObjectName("verticalLayout")
    self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
    self.horizontalLayout_2.setObjectName("horizontalLayout_2")
    self.textEdit_currentNum = QtWidgets.QTextEdit(self.tab_watching)
    font = QtGui.QFont()
    font.setFamily("Microsoft YaHei")
    self.textEdit_currentNum.setFont(font)
    self.textEdit_currentNum.setReadOnly(True)
    self.textEdit_currentNum.setObjectName("textEdit_currentNum")
    self.horizontalLayout_2.addWidget(self.textEdit_currentNum)
    self.horizontalLayout_2.setStretch(0, 2)
    self.verticalLayout.addLayout(self.horizontalLayout_2)
    self.btn_get_name = QtWidgets.QPushButton(self.tab_watching)
    # Green-on-violet scheme for the serial-port auto-detect button;
    # only these five roles were set by the original generated code.
    detect = {
        P.WindowText: (0, 85, 0), P.Button: (107, 78, 255),
        P.Dark: (49, 48, 54), P.Text: (0, 170, 0),
        P.ButtonText: (0, 0, 0),
    }
    _apply_palette(self.btn_get_name, detect, {
        **detect,
        P.WindowText: (49, 48, 54), P.Text: (49, 48, 54),
        P.ButtonText: (49, 48, 54),
    })
    font = QtGui.QFont()
    font.setPointSize(28)
    self.btn_get_name.setFont(font)
    self.btn_get_name.setObjectName("btn_get_name")
    self.verticalLayout.addWidget(self.btn_get_name)
    self.plainText_display = QtWidgets.QPlainTextEdit(self.tab_watching)
    # Partial scheme (Button/Mid/Text roles only) for the display pane.
    display = {
        P.Button: (58, 255, 81), P.Mid: (255, 255, 0),
        P.Text: (85, 85, 0),
    }
    _apply_palette(self.plainText_display, display,
                   {**display, P.Text: (127, 127, 127)})
    font = QtGui.QFont()
    font.setFamily("Microsoft YaHei")
    font.setPointSize(12)
    font.setStyleStrategy(QtGui.QFont.PreferAntialias)
    self.plainText_display.setFont(font)
    self.plainText_display.setUndoRedoEnabled(False)
    self.plainText_display.setReadOnly(True)
    self.plainText_display.setBackgroundVisible(True)
    self.plainText_display.setObjectName("plainText_display")
    self.verticalLayout.addWidget(self.plainText_display)
    self.verticalLayout.setStretch(0, 3)
    self.verticalLayout.setStretch(2, 3)
    self.gridLayout_2.addLayout(self.verticalLayout, 1, 0, 1, 1)
    self.tabWidget.addTab(self.tab_watching, "")

    # ---- third tab (tab_help): help browser ----------------------------
    self.tab_help = QtWidgets.QWidget()
    self.tab_help.setObjectName("tab_help")
    self.gridLayout_5 = QtWidgets.QGridLayout(self.tab_help)
    self.gridLayout_5.setObjectName("gridLayout_5")
    self.textBrowser_help = QtWidgets.QTextBrowser(self.tab_help)
    self.textBrowser_help.setObjectName("textBrowser_help")
    self.gridLayout_5.addWidget(self.textBrowser_help, 0, 0, 1, 1)
    self.tabWidget.addTab(self.tab_help, "")

    # ---- assemble, actions, final wiring -------------------------------
    self.gridLayout.addWidget(self.tabWidget, 0, 0, 1, 1)
    self.horizontalLayout.addLayout(self.gridLayout)
    MainWindow.setCentralWidget(self.centralwidget)
    self.actionVersion = QtWidgets.QAction(MainWindow)
    self.actionVersion.setObjectName("actionVersion")
    self.actionAbout = QtWidgets.QAction(MainWindow)
    self.actionAbout.setObjectName("actionAbout")
    self.actionClose = QtWidgets.QAction(MainWindow)
    self.actionClose.setObjectName("actionClose")
    self.actionautoDetect = QtWidgets.QAction(MainWindow)
    self.actionautoDetect.setCheckable(False)
    self.actionautoDetect.setObjectName("actionautoDetect")
    self.retranslateUi(MainWindow)
    self.tabWidget.setCurrentIndex(1)  # open on the watching tab
    QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Install all user-visible (translatable) strings on the widgets built by setupUi.

    NOTE(review): this looks like pyuic-generated code (Qt Designer output) —
    the QtCore/QtGui/QtWidgets usage and the setupUi/retranslateUi pairing match
    that pattern. If so, regenerating from the .ui file will overwrite manual
    edits; confirm before hand-modifying.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "MAC地址搜集器"))
    # Button captions on the "operation" tab.
    self.btn_export_excle.setText(_translate("MainWindow", "导出excle"))
    self.btn_save_log.setText(_translate("MainWindow", "重新开始计数"))
    self.btn_change_voice.setText(_translate("MainWindow", "切换声音模式"))
    self.btn_close_voice.setText(_translate("MainWindow", "点击关闭声音"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "操作"))
    # Rich-text HTML payloads below are Designer-generated; left verbatim.
    self.textEdit_currentNum.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
    "<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
    "p, li { white-space: pre-wrap; }\n"
    "</style></head><body style=\" font-family:\'Microsoft YaHei\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
    "<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
    "<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
    "<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
    "<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
    "<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:72pt; color:#00aa00;\">0</span></p></body></html>"))
    self.btn_get_name.setText(_translate("MainWindow", "点击自动识别串口"))
    self.plainText_display.setPlainText(_translate("MainWindow", "等待中......"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_watching), _translate("MainWindow", "测试"))
    # Static help document shown on the "help" tab.
    self.textBrowser_help.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
    "<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
    "p, li { white-space: pre-wrap; }\n"
    "</style></head><body style=\" font-family:\'Microsoft YaHei\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
    "<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:24pt; font-weight:600;\">帮助文档</span></p>\n"
    "<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:24pt; font-weight:600;\"><br /></p>\n"
    "<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:12pt;\">1. 先用</span><span style=\" font-size:18pt; font-weight:600;\">USB</span><span style=\" font-size:12pt;\">连接好测试架,确保测试架连接成功。</span></p>\n"
    "<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:12pt;\">2. 点击[</span><span style=\" font-size:18pt; font-weight:600;\">测试</span><span style=\" font-size:12pt;\">]进入测试模式,自动识别测试架串口。如中途连接中断,则点击按键[</span><span style=\" font-size:12pt; font-weight:600;\">点击自动识别串口</span><span style=\" font-size:12pt;\">]即可再次识别。</span></p>\n"
    "<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:12pt;\"><br /></p>\n"
    "<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:12pt;\">3. 点击[</span><span style=\" font-size:18pt; font-weight:600;\">操作</span><span style=\" font-size:12pt;\">]可导出当前测试数据为[.csv]格式。</span></p>\n"
    "<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:12pt;\"><br /></p>\n"
    "<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:12pt;\">4. 点击[</span><span style=\" font-size:18pt; font-weight:600;\">重新开始计数</span><span style=\" font-size:12pt;\">], 会</span><span style=\" font-size:12pt; font-weight:600;\">删除</span><span style=\" font-size:12pt;\">当前已经测试的数据,请保证已经导出Excle再进行此操作。</span></p>\n"
    "<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:12pt;\"><br /></p></body></html>"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_help), _translate("MainWindow", "帮助"))
    # Menu/QAction captions.
    self.actionVersion.setText(_translate("MainWindow", "版本号"))
    self.actionAbout.setText(_translate("MainWindow", "关于"))
    self.actionAbout.setIconText(_translate("MainWindow", "关于"))
    self.actionClose.setText(_translate("MainWindow", "关闭"))
    self.actionautoDetect.setText(_translate("MainWindow", "自动配置串口"))
| 62.804444
| 420
| 0.696318
| 5,117
| 42,393
| 5.738714
| 0.057065
| 0.142993
| 0.086634
| 0.113707
| 0.826154
| 0.813179
| 0.80361
| 0.798093
| 0.787434
| 0.777116
| 0
| 0.041281
| 0.168589
| 42,393
| 674
| 421
| 62.897626
| 0.791863
| 0.004293
| 0
| 0.767372
| 1
| 0
| 0.044997
| 0.005284
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003021
| false
| 0
| 0.001511
| 0
| 0.006042
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
11d5db1f0545607c442f4bc032433bd0c623c449
| 7,729
|
py
|
Python
|
tests/test_size.py
|
Club-Alpin-Annecy/Flask-Images
|
5c0d4028d3e6e04769ab7bb68258c02cd4769406
|
[
"BSD-3-Clause"
] | 75
|
2015-01-07T20:25:53.000Z
|
2021-11-01T17:49:00.000Z
|
tests/test_size.py
|
Club-Alpin-Annecy/Flask-Images
|
5c0d4028d3e6e04769ab7bb68258c02cd4769406
|
[
"BSD-3-Clause"
] | 30
|
2015-01-07T19:57:58.000Z
|
2021-08-31T09:14:33.000Z
|
tests/test_size.py
|
Club-Alpin-Annecy/Flask-Images
|
5c0d4028d3e6e04769ab7bb68258c02cd4769406
|
[
"BSD-3-Clause"
] | 46
|
2015-01-14T03:09:03.000Z
|
2022-02-01T20:18:50.000Z
|
from . import *
def make_transform(width=100, height=100, source_width=100, source_height=100):
    """Build an ``EXTENT`` transform spec as consumed by ``ImageSize``.

    The original helper hard-coded the 100x100 source-image extent; the extra
    ``source_width``/``source_height`` parameters generalize it while keeping
    every existing call (``make_transform()``, ``make_transform(w, h)``)
    byte-for-byte compatible.

    Returns:
        list: ``['EXTENT', width, height, 0, 0, source_width, source_height]``.
    """
    return ['EXTENT', width, height, 0, 0, source_width, source_height]
class TestImageSize(TestCase):
    """Exercise ImageSize's resize arithmetic for each mode/enlarge combination.

    All cases start from a 100x100 source (see make_transform). ``width``/
    ``height`` are the requested output size; ``op_width``/``op_height`` appear
    to be the size of the actual resize operation — TODO confirm against the
    ImageSize implementation.
    """
    def test_reshape(self):
        """Single-dimension requests: the missing dimension follows the source's square aspect."""
        s = ImageSize(transform=make_transform(), width=50)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 50)
        self.assertEqual(s.height, 50)
        s = ImageSize(transform=make_transform(), width=200, enlarge=True)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 200)
        self.assertEqual(s.height, 200)
        # enlarge=False clamps the request back to the source size.
        s = ImageSize(transform=make_transform(), width=200, enlarge=False)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 100)
        self.assertEqual(s.height, 100)
        s = ImageSize(transform=make_transform(), height=50)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 50)
        self.assertEqual(s.height, 50)
        s = ImageSize(transform=make_transform(), height=200, enlarge=True)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 200)
        self.assertEqual(s.height, 200)
        s = ImageSize(transform=make_transform(), height=200, enlarge=False)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 100)
        self.assertEqual(s.height, 100)
    def test_crop_enlarge(self):
        """mode='crop' with enlarge=True: output matches the request exactly."""
        # Both need enlarging.
        s = ImageSize(transform=make_transform(), width=150, height=200, mode='crop', enlarge=True)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 200)
        self.assertEqual(s.op_width, 200)
        self.assertEqual(s.op_height, 200)
        # One needs enlarging.
        s = ImageSize(transform=make_transform(200, 100), width=150, height=200, mode='crop', enlarge=True)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 200)
        self.assertEqual(s.op_width, 400)
        self.assertEqual(s.op_height, 200)
        # Neither need enlarging.
        s = ImageSize(transform=make_transform(400, 400), width=150, height=200, mode='crop', enlarge=True)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 200)
        self.assertEqual(s.op_width, 200)
        self.assertEqual(s.op_height, 200)
    def test_crop_no_enlarge(self):
        """mode='crop' with enlarge=False: dimensions are capped at the source size."""
        # Both need enlarging.
        s = ImageSize(transform=make_transform(), width=150, height=200, mode='crop', enlarge=False)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 100)
        self.assertEqual(s.height, 100)
        self.assertEqual(s.op_width, 100)
        self.assertEqual(s.op_height, 100)
        # One needs enlarging.
        s = ImageSize(transform=make_transform(200, 100), width=150, height=200, mode='crop', enlarge=False)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 100) # <--
        self.assertEqual(s.op_width, 200)
        self.assertEqual(s.op_height, 100)
        # Neither need enlarging.
        s = ImageSize(transform=make_transform(400, 400), width=150, height=200, mode='crop', enlarge=False)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 200)
        self.assertEqual(s.op_width, 200)
        self.assertEqual(s.op_height, 200)
    def test_fit_enlarge(self):
        """mode='fit' with enlarge=True: output fits inside the request, aspect preserved."""
        # Both need enlarging.
        s = ImageSize(transform=make_transform(), width=150, height=200, mode='fit', enlarge=True)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 150)
        self.assertEqual(s.op_width, 150)
        self.assertEqual(s.op_height, 150)
        # One is big enough.
        s = ImageSize(transform=make_transform(200, 100), width=150, height=200, mode='fit', enlarge=True)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 75)
        self.assertEqual(s.op_width, 150)
        self.assertEqual(s.op_height, 75)
        # Neither need enlarging.
        s = ImageSize(transform=make_transform(400, 400), width=150, height=200, mode='fit', enlarge=True)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 150)
        self.assertEqual(s.op_width, 150)
        self.assertEqual(s.op_height, 150)
    def test_fit_no_enlarge(self):
        """mode='fit' with enlarge=False: fit within the request without upscaling."""
        # Both need enlarging.
        s = ImageSize(transform=make_transform(), width=150, height=200, mode='fit', enlarge=False)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 100)
        self.assertEqual(s.height, 100)
        self.assertEqual(s.op_width, 100)
        self.assertEqual(s.op_height, 100)
        # One is big enough.
        s = ImageSize(transform=make_transform(200, 100), width=150, height=200, mode='fit', enlarge=False)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 75) # <--
        self.assertEqual(s.op_width, 150)
        self.assertEqual(s.op_height, 75)
        # Neither need enlarging.
        s = ImageSize(transform=make_transform(400, 400), width=150, height=200, mode='fit', enlarge=False)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 150)
        self.assertEqual(s.op_width, 150)
        self.assertEqual(s.op_height, 150)
    def test_pad_enlarge(self):
        """mode='pad' with enlarge=True: canvas is the full request; op_* is the fitted image."""
        # Both need enlarging.
        s = ImageSize(transform=make_transform(), width=150, height=200, mode='pad', enlarge=True)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 200)
        self.assertEqual(s.op_width, 150)
        self.assertEqual(s.op_height, 150)
        # One is big enough.
        s = ImageSize(transform=make_transform(200, 100), width=150, height=200, mode='pad', enlarge=True)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 200)
        self.assertEqual(s.op_width, 150)
        self.assertEqual(s.op_height, 75)
        # Neither need enlarging.
        s = ImageSize(transform=make_transform(400, 400), width=150, height=200, mode='pad', enlarge=True)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 200)
        self.assertEqual(s.op_width, 150)
        self.assertEqual(s.op_height, 150)
    def test_pad_no_enlarge(self):
        """mode='pad' with enlarge=False: canvas stays at the request; op_* never upscales."""
        # Both need enlarging.
        s = ImageSize(transform=make_transform(), width=150, height=200, mode='pad', enlarge=False)
        self.assertTrue(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 200)
        self.assertEqual(s.op_width, 100)
        self.assertEqual(s.op_height, 100)
        # One is big enough.
        s = ImageSize(transform=make_transform(200, 100), width=150, height=200, mode='pad', enlarge=False)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 200) # <--
        self.assertEqual(s.op_width, 150)
        self.assertEqual(s.op_height, 75)
        # Neither need enlarging.
        s = ImageSize(transform=make_transform(400, 400), width=150, height=200, mode='pad', enlarge=False)
        self.assertFalse(s.needs_enlarge)
        self.assertEqual(s.width, 150)
        self.assertEqual(s.height, 200)
        self.assertEqual(s.op_width, 150)
        self.assertEqual(s.op_height, 150)
| 38.452736
| 108
| 0.650149
| 999
| 7,729
| 4.928929
| 0.045045
| 0.25589
| 0.272949
| 0.1316
| 0.971162
| 0.971162
| 0.949838
| 0.949838
| 0.94537
| 0.94537
| 0
| 0.075497
| 0.225385
| 7,729
| 200
| 109
| 38.645
| 0.746952
| 0.051624
| 0
| 0.748252
| 0
| 0
| 0.009035
| 0
| 0
| 0
| 0
| 0
| 0.755245
| 1
| 0.055944
| false
| 0
| 0.006993
| 0.006993
| 0.076923
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
01205f82faa55eecc95322b862f5fdcb62830367
| 8,089
|
py
|
Python
|
tests/test_random_train_test_split.py
|
georgymh/decentralized-ml
|
117f998657a0a22c5c865814551082a40da00596
|
[
"Apache-2.0"
] | 11
|
2018-09-04T04:36:33.000Z
|
2021-07-14T13:11:35.000Z
|
tests/test_random_train_test_split.py
|
georgymh/decentralized-ml
|
117f998657a0a22c5c865814551082a40da00596
|
[
"Apache-2.0"
] | 30
|
2018-07-12T16:44:21.000Z
|
2019-01-23T23:36:59.000Z
|
tests/test_random_train_test_split.py
|
georgymh/decentralized-ml
|
117f998657a0a22c5c865814551082a40da00596
|
[
"Apache-2.0"
] | 4
|
2018-12-18T17:11:41.000Z
|
2021-07-14T13:11:42.000Z
|
import pandas as pd
import os
import pytest
import random
from data.iterators import count_datapoints
from data.iterators import create_random_test_dataset_iterator
from data.iterators import create_random_train_dataset_iterator
@pytest.fixture(scope='session')
def dataset_path():
    """Directory holding the pre-split random dataset artifacts."""
    return "tests/artifacts/iterators/random"


@pytest.fixture(scope='session')
def train_dataset_path():
    """CSV file with the training portion of the random split."""
    return "tests/artifacts/iterators/random/train.csv"


@pytest.fixture(scope='session')
def test_dataset_path():
    """CSV file with the test portion of the random split."""
    return "tests/artifacts/iterators/random/test.csv"
def get_num_columns(dataset_path):
    """
    Helper function to find the number of columns in dataset.

    Inspects the first ``.csv`` file found in ``dataset_path`` (directory
    listing order) and counts the comma-separated fields of its first line.

    Returns:
        int: number of columns, or ``None`` when no CSV file exists.
    """
    for filename in os.listdir(dataset_path):
        if not filename.endswith(".csv"):
            continue
        full_path = os.path.join(dataset_path, filename)
        # Fix: the original opened the file without ever closing it (handle
        # leak) and materialized every line via list(f) just to read line one.
        with open(full_path) as f:
            return len(f.readline().split(','))
    return None
def test_train_test_split(dataset_path, train_dataset_path, test_dataset_path):
    """
    Test train-test split works by:
    1. Checking the split put the right number of datapoints (proportional
       to the total number) in the training and test datasets.
    2. Checking that datapoints don't overlap between each dataset.
    """
    total = count_datapoints(dataset_path)

    # Finite iterators over the pre-split train and test CSVs.
    train_iter = create_random_train_dataset_iterator(
        train_dataset_path,
        batch_size=7,
        labeler='label',
        infinite=False
    )
    test_iter = create_random_test_dataset_iterator(
        test_dataset_path,
        batch_size=7,
        labeler='label',
        infinite=False
    )

    # Use the trailing index column of each datapoint as its identity so
    # overlap between the two sets can be detected later.
    train_indices = [point[-1] for X, _ in train_iter for point in X]
    test_indices = [point[-1] for X, _ in test_iter for point in X]

    # The split must be exactly 80/20.
    assert len(train_indices) == 0.8 * total
    assert len(test_indices) == 0.2 * total

    # An inner join on the index column must come back empty.
    train_df = pd.DataFrame(data={"index": train_indices})
    test_df = pd.DataFrame(data={"index": test_indices})
    assert len(pd.merge(train_df, test_df, on='index').index) == 0
def test_large_batch_size(dataset_path, train_dataset_path, test_dataset_path):
    """
    Same as test_train_test_split, except with batch_size > count (datapoints).
    """
    total = count_datapoints(dataset_path)
    # A batch deliberately larger than the whole dataset.
    oversized_batch = total * random.randint(1, 5)

    train_iter = create_random_train_dataset_iterator(
        train_dataset_path,
        batch_size=oversized_batch,
        labeler='label',
        infinite=False
    )
    test_iter = create_random_test_dataset_iterator(
        test_dataset_path,
        batch_size=oversized_batch,
        labeler='label',
        infinite=False
    )

    # Identify each datapoint by its trailing index column.
    train_indices = [point[-1] for X, _ in train_iter for point in X]
    test_indices = [point[-1] for X, _ in test_iter for point in X]

    # Oversized batches must not change the 80/20 proportions...
    assert len(train_indices) == 0.8 * total
    assert len(test_indices) == 0.2 * total

    # ...nor introduce any overlap between the two sets.
    train_df = pd.DataFrame(data={"index": train_indices})
    test_df = pd.DataFrame(data={"index": test_indices})
    assert len(pd.merge(train_df, test_df, on='index').index) == 0
def test_infinite_works(dataset_path, train_dataset_path, test_dataset_path):
    """
    With infinite=True, test 3 * count iterations to see that data is what we expect.
    """
    count = count_datapoints(dataset_path)
    #Set up iterator for training set
    random_train_iterator = create_random_train_dataset_iterator(
        train_dataset_path,
        batch_size=4,
        labeler='label',
        infinite=True
    )
    #Set up iterator for test set.
    random_test_iterator = create_random_test_dataset_iterator(
        test_dataset_path,
        batch_size=4,
        labeler='label',
        infinite=True
    )
    #Collect "datapoints" for training and test set. In reality, just take the
    #index so that overlapping points can be detected later.
    limit = 3 * count
    num_datapoints = 0
    train_data = []
    for X,y in random_train_iterator:
        for datapoint in X:
            train_data.append(datapoint[-1])
            num_datapoints += 1
        # Stop after consuming 80% of the 3*count budget; the check runs once
        # per batch. NOTE(review): the exact-equality assert below implies
        # limit * 0.8 lands on a batch boundary — confirm against the artifact
        # sizes.
        if num_datapoints >= limit * 0.8:
            break
    num_datapoints = 0
    test_data = []
    for X,y in random_test_iterator:
        for datapoint in X:
            test_data.append(datapoint[-1])
            num_datapoints += 1
        if num_datapoints >= limit * 0.2:
            break
    #Check that training set and test set have right number of datapoints
    assert len(train_data) == 0.8*count*3
    assert len(test_data) == 0.2*count*3
    #Set up dataframe for join
    train_df = pd.DataFrame(data={"index": train_data})
    test_df = pd.DataFrame(data={"index": test_data})
    #Check for no overlapping datapoints
    assert len(pd.merge(train_df, test_df, on='index').index) == 0
def test_invalid_batch_size(dataset_path, train_dataset_path, test_dataset_path):
    """A negative batch size must trip the iterator's batch-size assertion."""
    count = count_datapoints(dataset_path)
    # Both iterators are constructed with an invalid (negative) batch size.
    random_train_iterator = create_random_train_dataset_iterator(
        train_dataset_path,
        batch_size=-1,
        labeler='label',
        infinite=False
    )
    random_test_iterator = create_random_test_dataset_iterator(
        test_dataset_path,
        batch_size=-1,
        labeler='label',
        infinite=False
    )
    # Consuming the train iterator should raise before any data is yielded;
    # the message check distinguishes the expected failure from the fallback.
    try:
        collected = []
        for X, _ in random_train_iterator:
            for point in X:
                collected.append(point[-1])
        assert False,"Assertion for batch size should have failed"
    except AssertionError as err:
        assert str(err) == "Invalid batch size provided."
def test_invalid_labeler(dataset_path, train_dataset_path, test_dataset_path):
    """An unknown label column must trip the iterator's labeler assertion."""
    count = count_datapoints(dataset_path)
    # Both iterators reference a label column that does not exist.
    random_train_iterator = create_random_train_dataset_iterator(
        train_dataset_path,
        batch_size=4,
        labeler='bad_column',
        infinite=True
    )
    random_test_iterator = create_random_test_dataset_iterator(
        test_dataset_path,
        batch_size=4,
        labeler='bad_column',
        infinite=True
    )
    # Consuming the train iterator should raise before any data is yielded;
    # the message check distinguishes the expected failure from the fallback.
    try:
        collected = []
        for X, _ in random_train_iterator:
            for point in X:
                collected.append(point[-1])
        assert False,"Assertion for labeler should have failed."
    except AssertionError as err:
        assert str(err) == "Labeler is invalid."
def test_invalid_dataset_path():
    """count_datapoints must reject a path that does not exist."""
    bad_path = "bad/dataset/path"
    try:
        count_datapoints(bad_path)
        # Reaching this line means the expected assertion never fired.
        assert False, "Assertion for dataset path should have failed."
    except AssertionError as err:
        assert str(err) == "Dataset path is invalid."
| 31.846457
| 85
| 0.655334
| 1,052
| 8,089
| 4.81749
| 0.129278
| 0.09116
| 0.034728
| 0.04341
| 0.810576
| 0.780189
| 0.766377
| 0.734412
| 0.726125
| 0.706985
| 0
| 0.008067
| 0.264433
| 8,089
| 253
| 86
| 31.972332
| 0.843697
| 0.206453
| 0
| 0.70122
| 0
| 0
| 0.073609
| 0.018283
| 0
| 0
| 0
| 0
| 0.109756
| 1
| 0.060976
| false
| 0
| 0.042683
| 0.018293
| 0.128049
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
016c362002bc60f9f082f7e2e1b3e2b285d795c4
| 103
|
py
|
Python
|
kwat/array/check_not_nan.py
|
KwatME/ccal
|
d96dfa811482eee067f346386a2181ec514625f4
|
[
"MIT"
] | 5
|
2017-05-05T17:50:28.000Z
|
2019-01-30T19:23:02.000Z
|
kwat/array/check_not_nan.py
|
KwatME/ccal
|
d96dfa811482eee067f346386a2181ec514625f4
|
[
"MIT"
] | 5
|
2017-05-05T01:52:31.000Z
|
2019-04-20T21:06:05.000Z
|
kwat/array/check_not_nan.py
|
KwatME/ccal
|
d96dfa811482eee067f346386a2181ec514625f4
|
[
"MIT"
] | 5
|
2017-07-17T18:55:54.000Z
|
2019-02-02T04:46:19.000Z
|
from numpy import isnan, logical_not
def check_not_nan(nu___):
    """Return a boolean mask that is True wherever *nu___* is not NaN."""
    nan_mask = isnan(nu___)
    return logical_not(nan_mask)
| 14.714286
| 36
| 0.76699
| 16
| 103
| 4.3125
| 0.6875
| 0.289855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15534
| 103
| 6
| 37
| 17.166667
| 0.793103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6d72b41ed2a7a60644d2246484e7f29852685b36
| 87
|
py
|
Python
|
neighgen/__init__.py
|
Privex/neighgen
|
912e2fbaddfe75610af1f87d8259fd23c180927a
|
[
"X11",
"MIT"
] | 1
|
2021-09-20T06:29:08.000Z
|
2021-09-20T06:29:08.000Z
|
neighgen/__init__.py
|
Privex/neighgen
|
912e2fbaddfe75610af1f87d8259fd23c180927a
|
[
"X11",
"MIT"
] | null | null | null |
neighgen/__init__.py
|
Privex/neighgen
|
912e2fbaddfe75610af1f87d8259fd23c180927a
|
[
"X11",
"MIT"
] | null | null | null |
from neighgen.core import *
from neighgen.version import *
from neighgen.ngen import *
| 21.75
| 30
| 0.793103
| 12
| 87
| 5.75
| 0.5
| 0.521739
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 87
| 3
| 31
| 29
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6d8269ed6bbda3e78282a680b2381c72aebb418c
| 166
|
py
|
Python
|
analysis_data/__init__.py
|
solmannn/alu
|
1e31b8a39a4718f32b4a8d3f5614553744fd2aad
|
[
"MIT"
] | null | null | null |
analysis_data/__init__.py
|
solmannn/alu
|
1e31b8a39a4718f32b4a8d3f5614553744fd2aad
|
[
"MIT"
] | null | null | null |
analysis_data/__init__.py
|
solmannn/alu
|
1e31b8a39a4718f32b4a8d3f5614553744fd2aad
|
[
"MIT"
] | null | null | null |
from analysis_data.AnalysisConfig import AnalysisConfig
from analysis_data.AnalysisResult import AnalysisResult
from analysis_data.AnalysisResult import AnalysisBits
| 41.5
| 55
| 0.909639
| 18
| 166
| 8.222222
| 0.388889
| 0.243243
| 0.324324
| 0.405405
| 0.486486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072289
| 166
| 3
| 56
| 55.333333
| 0.961039
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6d96cc75b23e33f113e264a65a0ac4c117212a48
| 14,397
|
py
|
Python
|
tests/test_console.py
|
5H3LL3H5/hm310p
|
a334c87e18c1420f6ae60e51737f3b5a47744e88
|
[
"MIT"
] | null | null | null |
tests/test_console.py
|
5H3LL3H5/hm310p
|
a334c87e18c1420f6ae60e51737f3b5a47744e88
|
[
"MIT"
] | null | null | null |
tests/test_console.py
|
5H3LL3H5/hm310p
|
a334c87e18c1420f6ae60e51737f3b5a47744e88
|
[
"MIT"
] | null | null | null |
# tests/test_console.py
import click.testing
import pytest
from hm310p_cli import console
# Shared CLI parameter values used to build the default argument vector below.
sport: str = "/dev/ttyS0"
pstate: str = "off"
vout: float = 12.00
iout: float = 1.000
ovp: float = 12.50
ocp: float = 1.100
# Device limits referenced by the clipping / boundary tests.
iMinA: float = 0.0
iMaxA: float = 10.0
uMinV: float = 0.0
uMaxV: float = 30.0

# Baseline argument list; each test copies it and deletes/replaces slots.
arglist = [
    f"--port={sport}",
    # Fix: was the placeholder-less f-string f"--powerstate=off", which
    # duplicated the pstate constant and let the two drift apart; the
    # resulting string is unchanged ("--powerstate=off").
    f"--powerstate={pstate}",
    f"--vout={vout:02.2f}",
    f"--iout={iout:02.3f}",
    f"--ovp={ovp:02.2f}",
    f"--ocp={ocp:02.3f}",
    "--debug",
]
@pytest.fixture
def runner():
    """Provide a fresh Click CLI test runner for each test."""
    return click.testing.CliRunner()
# --- Missing / invalid required options: the CLI must exit non-zero with a
# --- usage message naming the offending option.
def test_main_fails_without_args(runner):
    """Invoking the CLI with no arguments prints usage and fails."""
    result = runner.invoke(console.main)
    assert result.exception
    assert all(x in result.output for x in ["Usage", "Error:"])
    assert result.exit_code != 0


def test_main_fails_without_arg_port(runner):
    """Omitting --port (arglist slot 0) fails and names the option."""
    tmplist = list(arglist)
    del tmplist[0]
    result = runner.invoke(console.main, tmplist)
    assert result.exception
    assert all(x in result.output for x in ["Usage", "Error:", "-p", "--port"])
    assert result.exit_code != 0


def test_main_fails_without_arg_powerstate(runner):
    """Omitting --powerstate (slot 1) fails and names the option."""
    tmplist = list(arglist)
    del tmplist[1]
    result = runner.invoke(console.main, tmplist)
    assert result.exception
    assert all(x in result.output for x in ["Usage", "Error:", "-s", "--powerstate"])
    assert result.exit_code != 0


def test_main_fails_without_arg_vout(runner):
    """Omitting --vout (slot 2) fails and names the option."""
    tmplist = list(arglist)
    del tmplist[2]
    result = runner.invoke(console.main, tmplist)
    assert result.exception
    assert all(x in result.output for x in ["Usage", "Error:", "-V", "--vout"])
    assert result.exit_code != 0


def test_main_fails_without_arg_iout(runner):
    """Omitting --iout (slot 3) fails and names the option."""
    tmplist = list(arglist)
    del tmplist[3]
    result = runner.invoke(console.main, tmplist)
    assert result.exception
    assert all(x in result.output for x in ["Usage", "Error:", "-I", "--iout"])
    assert result.exit_code != 0


def test_main_fails_with_invalid_powerstate(runner):
    """A powerstate other than on/off is rejected."""
    tmplist = list(arglist)
    tmplist[1] = "--powerstate=onoff"
    result = runner.invoke(console.main, tmplist)
    assert result.exception
    assert all(x in result.output for x in ["Usage", "Error:", "-s", "--powerstate"])
    assert result.exit_code != 0
# --- Happy-path invocations: the CLI exits zero and echoes its settings.
def test_main_succeeds_without_debug(runner):
    """The --debug flag (slot 6) is optional."""
    tmplist = list(arglist)
    del tmplist[6]
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert result.exit_code == 0


def test_main_succeeds_with_powerstate_on(runner):
    """powerstate=on is accepted and echoed in the settings summary."""
    tmplist = list(arglist)
    tmplist[1] = "--powerstate=on"
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert all(
        x in result.output
        for x in [
            f"Port\t\t: {sport}",
            "Powerstate\t: on",
            f"Vout\t\t: {vout:02.3f} V",
            f"OVP\t\t: {ovp:02.3f} V",
            f"Iout\t\t: {iout:02.3f} A",
            f"OCP\t\t: {ocp:02.3f} A",
        ]
    )
    assert result.exit_code == 0


def test_main_succeeds_without_arg_ovp(runner):
    """--ovp (slot 4) is optional."""
    tmplist = list(arglist)
    del tmplist[4]
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert result.exit_code == 0


def test_main_succeeds_without_arg_ocp(runner):
    """--ocp (slot 5) is optional."""
    tmplist = list(arglist)
    del tmplist[5]
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert result.exit_code == 0
# --- OVP/OCP defaulting: when omitted, the CLI derives them as 1.05x the
# --- corresponding output value (per the "not given" strings asserted below).
def test_main_succeeds_with_arg_ovp_and_with_arg_ocp(runner):
    """Explicit OVP and OCP are echoed unchanged."""
    tmplist = list(arglist)
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert all(
        x in result.output
        for x in [
            f"Port\t\t: {sport}",
            f"Powerstate\t: {pstate}",
            f"Vout\t\t: {vout:02.3f} V",
            f"OVP\t\t: {ovp:02.3f} V",
            f"Iout\t\t: {iout:02.3f} A",
            f"OCP\t\t: {ocp:02.3f} A",
        ]
    )
    assert result.exit_code == 0


def test_main_succeeds_without_arg_ovp_but_with_arg_ocp(runner):
    """A missing OVP defaults to vout * 1.05."""
    tmplist = list(arglist)
    del tmplist[4]
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert all(
        x in result.output
        for x in [
            f"Port\t\t: {sport}",
            f"Powerstate\t: {pstate}",
            f"Vout\t\t: {vout:02.3f} V",
            f"OVP\t\t: {vout*1.05:02.3f} V => OVP not given,",
            f"Iout\t\t: {iout:02.3f} A",
            f"OCP\t\t: {ocp:02.3f} A",
        ]
    )
    assert result.exit_code == 0


def test_main_succeeds_with_arg_ovp_but_without_arg_ocp(runner):
    """A missing OCP defaults to iout * 1.05."""
    tmplist = list(arglist)
    del tmplist[5]
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert all(
        x in result.output
        for x in [
            f"Port\t\t: {sport}",
            f"Powerstate\t: {pstate}",
            f"Vout\t\t: {vout:02.3f} V",
            f"OVP\t\t: {ovp:02.3f} V",
            f"Iout\t\t: {iout:02.3f} A",
            f"OCP\t\t: {iout*1.05:02.3f} A => OCP not given,",
        ]
    )
    assert result.exit_code == 0


def test_main_succeeds_without_arg_ovp_and_without_arg_ocp(runner):
    """Both OVP and OCP missing: both default to 1.05x their output value."""
    tmplist = list(arglist)
    del tmplist[4:6]
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert all(
        x in result.output
        for x in [
            f"Port\t\t: {sport}",
            f"Powerstate\t: {pstate}",
            f"Vout\t\t: {vout:02.3f} V",
            f"OVP\t\t: {vout*1.05:02.3f} V => OVP not given,",
            f"Iout\t\t: {iout:02.3f} A",
            f"OCP\t\t: {iout*1.05:02.3f} A => OCP not given,",
        ]
    )
    assert result.exit_code == 0
# --- Clipping: a derived OVP/OCP that would exceed the device limit is
# --- clipped to uMaxV / iMaxA. The outputs are chosen just under limit/1.05
# --- so the 1.05x default lands marginally above the limit.
def test_main_succeeds_ovp_clipped_to_upper_interval_limit(runner):
    """Derived OVP exceeding uMaxV is clipped to uMaxV."""
    tmplist = list(arglist)
    del tmplist[4:6]
    tmp_vout = uMaxV / (1.05 - 0.0001)
    tmplist[2] = f"--vout={tmp_vout:02.3f}"
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert all(
        x in result.output
        for x in [
            f"Vout\t\t: {tmp_vout:02.3f} V",
            f"OVP\t\t: {uMaxV:02.3f} V => OVP not given, clipped",
        ]
    )
    assert result.exit_code == 0


def test_main_succeeds_ocp_clipped_to_upper_interval_limit(runner):
    """Derived OCP exceeding iMaxA is clipped to iMaxA."""
    tmplist = list(arglist)
    del tmplist[4:6]
    tmp_iout = iMaxA / (1.05 - 0.0001)
    tmplist[3] = f"--iout={tmp_iout:02.3f}"
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert all(
        x in result.output
        for x in [
            f"Iout\t\t: {tmp_iout:02.3f} A",
            f"OCP\t\t: {iMaxA:02.3f} A => OCP not given, clipped",
        ]
    )
    assert result.exit_code == 0


def test_main_succeeds_ovp_and_ocp_clipped_to_upper_interval_limit(runner):
    """Both derived limits clip simultaneously."""
    tmplist = list(arglist)
    del tmplist[4:6]
    tmp_vout = uMaxV / (1.05 - 0.0001)
    tmp_iout = iMaxA / (1.05 - 0.0001)
    tmplist[2] = f"--vout={tmp_vout:02.3f}"
    tmplist[3] = f"--iout={tmp_iout:02.3f}"
    result = runner.invoke(console.main, tmplist)
    assert not result.exception
    assert all(
        x in result.output
        for x in [
            f"Vout\t\t: {tmp_vout:02.3f} V",
            f"OVP\t\t: {uMaxV:02.3f} V => OVP not given, clipped",
            f"Iout\t\t: {tmp_iout:02.3f} A",
            f"OCP\t\t: {iMaxA:02.3f} A => OCP not given, clipped",
        ]
    )
    assert result.exit_code == 0
# --- Sanity constraints: a protection limit below its output value is an error.
def test_main_fails_with_ovp_smaller_than_vout(runner):
    """OVP < Vout is rejected with an explanatory error line."""
    tmplist = list(arglist)
    tmplist[4] = f"--ovp={vout-1:02.3f}"
    result = runner.invoke(console.main, tmplist)
    assert result.exception
    assert all(
        x in result.output
        for x in ["Usage", f"Error: OVP={vout-1:02.3f} V < Vout={vout:02.3f} V"]
    )
    assert result.exit_code != 0


def test_main_fails_with_ocp_smaller_than_iout(runner):
    """OCP < Iout is rejected with an explanatory error line."""
    tmplist = list(arglist)
    tmplist[5] = f"--ocp={iout-1:02.3f}"
    result = runner.invoke(console.main, tmplist)
    assert result.exception
    assert all(
        x in result.output
        for x in ["Usage", f"Error: OCP={iout-1:02.3f} A < Iout={iout:02.3f} A"]
    )
    assert result.exit_code != 0
def test_main_fails_with_vout_out_of_upper_interval_limit(runner):
tmplist = list(arglist)
tmplist[2] = "--vout=50"
result = runner.invoke(console.main, tmplist)
assert result.exception
assert all(x in result.output for x in ["Usage", "Error: Invalid value"])
assert result.exit_code != 0
def test_main_fails_with_iout_out_of_upper_interval_limit(runner):
tmplist = list(arglist)
tmplist[3] = "--iout=50"
result = runner.invoke(console.main, tmplist)
assert result.exception
assert all(x in result.output for x in ["Usage", "Error: Invalid value"])
assert result.exit_code != 0
def test_main_fails_with_ovp_out_of_upper_interval_limit(runner):
    """An OVP above the valid interval is rejected by the CLI."""
    args = list(arglist)
    args[4] = "--ovp=50"
    result = runner.invoke(console.main, args)
    assert result.exception
    for fragment in ("Usage", "Error: Invalid value"):
        assert fragment in result.output
    assert result.exit_code != 0
def test_main_fails_with_ocp_out_of_upper_interval_limit(runner):
    """An OCP above the valid interval is rejected by the CLI."""
    args = list(arglist)
    args[5] = "--ocp=50"
    result = runner.invoke(console.main, args)
    assert result.exception
    for fragment in ("Usage", "Error: Invalid value"):
        assert fragment in result.output
    assert result.exit_code != 0
def test_main_fails_with_vout_out_of_lower_interval_limit(runner):
    """A negative Vout is rejected by the CLI."""
    args = list(arglist)
    args[2] = "--vout=-1"
    result = runner.invoke(console.main, args)
    assert result.exception
    for fragment in ("Usage", "Error: Invalid value"):
        assert fragment in result.output
    assert result.exit_code != 0
def test_main_fails_with_iout_out_of_lower_interval_limit(runner):
    """A negative Iout is rejected by the CLI."""
    args = list(arglist)
    args[3] = "--iout=-1"
    result = runner.invoke(console.main, args)
    assert result.exception
    for fragment in ("Usage", "Error: Invalid value"):
        assert fragment in result.output
    assert result.exit_code != 0
def test_main_fails_with_ovp_out_of_lower_interval_limit(runner):
    """A negative OVP is rejected by the CLI."""
    args = list(arglist)
    args[4] = "--ovp=-1"
    result = runner.invoke(console.main, args)
    assert result.exception
    for fragment in ("Usage", "Error: Invalid value"):
        assert fragment in result.output
    assert result.exit_code != 0
def test_main_fails_with_ocp_out_of_lower_interval_limit(runner):
    """A negative OCP is rejected by the CLI."""
    args = list(arglist)
    args[5] = "--ocp=-1"
    result = runner.invoke(console.main, args)
    assert result.exception
    for fragment in ("Usage", "Error: Invalid value"):
        assert fragment in result.output
    assert result.exit_code != 0
def test_main_succeeds_with_vout_at_lower_interval_limit(runner):
    """Vout exactly at the lower interval limit is accepted."""
    args = list(arglist)
    args[2] = f"--vout={uMinV:f}"
    result = runner.invoke(console.main, args)
    assert not result.exception
    assert f"Vout\t\t: {uMinV:02.3f} V" in result.output
    assert result.exit_code == 0
def test_main_succeeds_with_vout_and_ovp_at_lower_interval_limit(runner):
    """Vout and OVP both at the lower interval limit are accepted."""
    args = list(arglist)
    args[2] = f"--vout={uMinV:f}"
    args[4] = f"--ovp={uMinV:f}"
    result = runner.invoke(console.main, args)
    assert not result.exception
    for line in (
        f"Vout\t\t: {uMinV:02.3f} V",
        f"OVP\t\t: {uMinV:02.3f} V",
    ):
        assert line in result.output
    assert result.exit_code == 0
def test_main_succeeds_with_iout_at_lower_interval_limit(runner):
    """Iout exactly at the lower interval limit is accepted."""
    args = list(arglist)
    args[3] = f"--iout={iMinA:f}"
    result = runner.invoke(console.main, args)
    assert not result.exception
    assert f"Iout\t\t: {iMinA:02.3f} A" in result.output
    assert result.exit_code == 0
def test_main_succeeds_with_iout_and_ocp_at_lower_interval_limit(runner):
    """Iout and OCP both at the lower interval limit are accepted."""
    args = list(arglist)
    args[3] = f"--iout={iMinA:f}"
    args[5] = f"--ocp={iMinA:f}"
    result = runner.invoke(console.main, args)
    assert not result.exception
    for line in (
        f"Iout\t\t: {iMinA:02.3f} A",
        f"OCP\t\t: {iMinA:02.3f} A",
    ):
        assert line in result.output
    assert result.exit_code == 0
def test_main_succeeds_with_vout_ovp_iout_and_ocp_at_lower_interval_limit(runner):
    """All four settings exactly at their lower interval limits are accepted."""
    args = list(arglist)
    args[2] = f"--vout={uMinV:f}"
    args[3] = f"--iout={iMinA:f}"
    args[4] = f"--ovp={uMinV:f}"
    args[5] = f"--ocp={iMinA:f}"
    result = runner.invoke(console.main, args)
    assert not result.exception
    for line in (
        f"Vout\t\t: {uMinV:02.3f} V",
        f"OVP\t\t: {uMinV:02.3f} V",
        f"Iout\t\t: {iMinA:02.3f} A",
        f"OCP\t\t: {iMinA:02.3f} A",
    ):
        assert line in result.output
    assert result.exit_code == 0
def test_main_succeeds_with_vout_and_ovp_at_upper_interval_limit(runner):
    """Vout and OVP both at the upper interval limit are accepted."""
    args = list(arglist)
    args[2] = f"--vout={uMaxV:f}"
    args[4] = f"--ovp={uMaxV:f}"
    result = runner.invoke(console.main, args)
    assert not result.exception
    for line in (
        f"Vout\t\t: {uMaxV:02.3f} V",
        f"OVP\t\t: {uMaxV:02.3f} V",
    ):
        assert line in result.output
    assert result.exit_code == 0
def test_main_succeeds_with_iout_and_ocp_at_upper_interval_limit(runner):
    """Iout and OCP both at the upper interval limit are accepted."""
    args = list(arglist)
    args[3] = f"--iout={iMaxA:f}"
    args[5] = f"--ocp={iMaxA:f}"
    result = runner.invoke(console.main, args)
    assert not result.exception
    for line in (
        f"Iout\t\t: {iMaxA:02.3f} A",
        f"OCP\t\t: {iMaxA:02.3f} A",
    ):
        assert line in result.output
    assert result.exit_code == 0
def test_main_succeeds_with_vout_ovp_iout_and_ocp_at_upper_interval_limit(runner):
    """All four settings exactly at their upper interval limits are accepted."""
    args = list(arglist)
    args[2] = f"--vout={uMaxV:f}"
    args[3] = f"--iout={iMaxA:f}"
    args[4] = f"--ovp={uMaxV:f}"
    args[5] = f"--ocp={iMaxA:f}"
    result = runner.invoke(console.main, args)
    assert not result.exception
    for line in (
        f"Vout\t\t: {uMaxV:02.3f} V",
        f"OVP\t\t: {uMaxV:02.3f} V",
        f"Iout\t\t: {iMaxA:02.3f} A",
        f"OCP\t\t: {iMaxA:02.3f} A",
    ):
        assert line in result.output
    assert result.exit_code == 0
| 29.684536
| 85
| 0.622144
| 2,169
| 14,397
| 3.979714
| 0.047948
| 0.022243
| 0.044601
| 0.101367
| 0.945783
| 0.931186
| 0.904078
| 0.880908
| 0.877549
| 0.867122
| 0
| 0.032583
| 0.241092
| 14,397
| 484
| 86
| 29.745868
| 0.757459
| 0.001459
| 0
| 0.708333
| 0
| 0.014706
| 0.178099
| 0.0064
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.088235
| false
| 0
| 0.007353
| 0.002451
| 0.098039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
096c479cd35b262be91ab059d7d9230b719cd331
| 3,454
|
py
|
Python
|
d4s2_api/migrations/0013_1_dds_id_fields_not_null.py
|
Duke-GCB/D4S2
|
47bef4b632967440608f2cc7a3fc31c32b2060fa
|
[
"MIT"
] | null | null | null |
d4s2_api/migrations/0013_1_dds_id_fields_not_null.py
|
Duke-GCB/D4S2
|
47bef4b632967440608f2cc7a3fc31c32b2060fa
|
[
"MIT"
] | 138
|
2016-09-23T18:09:18.000Z
|
2022-03-03T15:50:19.000Z
|
d4s2_api/migrations/0013_1_dds_id_fields_not_null.py
|
Duke-GCB/D4S2
|
47bef4b632967440608f2cc7a3fc31c32b2060fa
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2018-01-26 18:26
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make the DukeDS id CharFields non-null (default '') on the delivery and
    share models and their django-simple-history counterparts."""

    dependencies = [
        ('d4s2_api', '0013_0_null_to_blank_history_fields'),
    ]

    # One AlterField per (model, field) pair; all become a non-null CharField
    # with an empty-string default and preserve_default=False.
    operations = [
        migrations.AlterField(
            model_name=model_name,
            name=field_name,
            field=models.CharField(default='', help_text=help_text, max_length=255),
            preserve_default=False,
        )
        for model_name, field_name, help_text in [
            ('delivery', 'from_user_id', 'DukeDS uuid user sending delivery'),
            ('delivery', 'project_id', 'DukeDS uuid project to deliver'),
            ('delivery', 'to_user_id', 'DukeDS uuid user receiving delivery'),
            ('historicaldelivery', 'from_user_id', 'DukeDS uuid user sending delivery'),
            ('historicaldelivery', 'project_id', 'DukeDS uuid project to deliver'),
            ('historicaldelivery', 'to_user_id', 'DukeDS uuid user receiving delivery'),
            ('historicalshare', 'from_user_id', 'DukeDS uuid user sharing the project'),
            ('historicalshare', 'project_id', 'DukeDS uuid project to share with'),
            ('historicalshare', 'to_user_id', 'DukeDS uuid user having project shared with them'),
            ('share', 'from_user_id', 'DukeDS uuid user sharing the project'),
            ('share', 'project_id', 'DukeDS uuid project to share with'),
            ('share', 'to_user_id', 'DukeDS uuid user having project shared with them'),
        ]
    ]
| 39.25
| 125
| 0.607412
| 359
| 3,454
| 5.62117
| 0.183844
| 0.11893
| 0.148662
| 0.172448
| 0.900892
| 0.900892
| 0.882557
| 0.882557
| 0.882557
| 0.882557
| 0
| 0.023896
| 0.285177
| 3,454
| 87
| 126
| 39.701149
| 0.793439
| 0.019398
| 0
| 0.9
| 1
| 0
| 0.218381
| 0.010343
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
097548ee04061fd46c967ffb92f16b17ea720986
| 5,098
|
py
|
Python
|
tests/func/test_doc_serpyco.py
|
raphj/hapic
|
b169ee901005bbe535e27ec878a051c2c1226e43
|
[
"MIT"
] | 20
|
2017-10-13T11:23:33.000Z
|
2021-12-09T12:42:06.000Z
|
tests/func/test_doc_serpyco.py
|
raphj/hapic
|
b169ee901005bbe535e27ec878a051c2c1226e43
|
[
"MIT"
] | 130
|
2017-10-10T15:09:13.000Z
|
2021-12-30T10:36:08.000Z
|
tests/func/test_doc_serpyco.py
|
raphj/hapic
|
b169ee901005bbe535e27ec878a051c2c1226e43
|
[
"MIT"
] | 7
|
2017-10-17T07:24:42.000Z
|
2021-09-16T14:33:17.000Z
|
# coding: utf-8
import dataclasses
from hapic import Hapic
from hapic.error.serpyco import SerpycoDefaultErrorBuilder
from hapic.ext.agnostic.context import AgnosticApp
from hapic.ext.agnostic.context import AgnosticContext
from hapic.processor.serpyco import SerpycoProcessor
from tests.base import serpyco_compatible_python
@serpyco_compatible_python
class TestDocSerpyco(object):
    """
    Test doc generation for serpyco with AgnosticContext
    """

    def _make_hapic(self):
        """Return an (app, hapic) pair configured for serpyco processing.

        Factors out the identical four-line setup previously duplicated in
        every test method.
        """
        app = AgnosticApp()
        hapic = Hapic()
        hapic.set_processor_class(SerpycoProcessor)
        hapic.set_context(AgnosticContext(app, default_error_builder=SerpycoDefaultErrorBuilder()))
        return app, hapic

    def test_func__ok__doc__exclude_in_processor_output_body(self):
        """Excluded fields produce a dedicated *_exclude_* response schema."""
        app, hapic = self._make_hapic()

        @dataclasses.dataclass
        class UserSchema(object):
            id: int
            name: str
            password: str

        @hapic.with_api_doc()
        @hapic.output_body(UserSchema, processor=SerpycoProcessor(exclude=["password"]))
        def my_view():
            pass

        app.route("/hello", "GET", callback=my_view)
        doc = hapic.generate_doc()
        assert (
            "#/definitions/UserSchema_exclude_password"
            == doc["paths"]["/hello"]["get"]["responses"]["200"]["schema"]["$ref"]
        )
        assert "UserSchema_exclude_password" in doc["definitions"]

    def test_func__ok__doc__exclude_in_processor_input_body(self):
        """Excluded fields produce a dedicated *_exclude_* body-parameter schema."""
        app, hapic = self._make_hapic()

        @dataclasses.dataclass
        class UserSchema(object):
            id: int
            name: str
            password: str

        @hapic.with_api_doc()
        @hapic.input_body(UserSchema, processor=SerpycoProcessor(exclude=["password"]))
        def my_view(hapic_data):
            pass

        app.route("/hello", "GET", callback=my_view)
        doc = hapic.generate_doc()
        assert (
            "#/definitions/UserSchema_exclude_password"
            == doc["paths"]["/hello"]["get"]["parameters"][0]["schema"]["$ref"]
        )
        assert "UserSchema_exclude_password" in doc["definitions"]

    def test_func__ok__doc__exclude_in_processor_input_query(self):
        """Excluded fields are dropped from the generated query parameters."""
        app, hapic = self._make_hapic()

        @dataclasses.dataclass
        class UserSchema(object):
            id: int
            name: str
            password: str

        @hapic.with_api_doc()
        @hapic.input_query(UserSchema, processor=SerpycoProcessor(exclude=["password"]))
        def my_view(hapic_data):
            pass

        app.route("/hello", "GET", callback=my_view)
        doc = hapic.generate_doc()
        assert 2 == len(doc["paths"]["/hello"]["get"]["parameters"])
        assert "id" == doc["paths"]["/hello"]["get"]["parameters"][0]["name"]
        assert "name" == doc["paths"]["/hello"]["get"]["parameters"][1]["name"]
        assert "UserSchema_exclude_password" in doc["definitions"]

    def test_func__ok__doc__exclude_in_processor_input_path(self):
        """Excluded fields are dropped from the generated path parameters."""
        app, hapic = self._make_hapic()

        @dataclasses.dataclass
        class UserSchema(object):
            id: int
            name: str
            password: str

        @hapic.with_api_doc()
        @hapic.input_path(UserSchema, processor=SerpycoProcessor(exclude=["password"]))
        def my_view(hapic_data):
            pass

        app.route("/hello/{id}/{name}", "GET", callback=my_view)
        doc = hapic.generate_doc()
        assert 2 == len(doc["paths"]["/hello/{id}/{name}"]["get"]["parameters"])
        assert "id" == doc["paths"]["/hello/{id}/{name}"]["get"]["parameters"][0]["name"]
        assert "name" == doc["paths"]["/hello/{id}/{name}"]["get"]["parameters"][1]["name"]
        assert "UserSchema_exclude_password" in doc["definitions"]

    def test_func__ok__doc__with_handle_exception(self):
        """handle_exception documents the default error schema."""
        app, hapic = self._make_hapic()

        @dataclasses.dataclass
        class UserSchema(object):
            id: int
            name: str
            password: str

        @hapic.with_api_doc()
        @hapic.handle_exception(ZeroDivisionError, http_code=400)
        def my_view(hapic_data):
            1 / 0

        app.route("/hello", "GET", callback=my_view)
        doc = hapic.generate_doc()
        assert "DefaultErrorSchema" in doc["definitions"]
        properties = doc["definitions"]["DefaultErrorSchema"]["properties"]
        assert "message" in properties
        assert "details" in properties
        assert "code" in properties
| 35.158621
| 99
| 0.636132
| 532
| 5,098
| 5.853383
| 0.156015
| 0.032113
| 0.033398
| 0.020873
| 0.814066
| 0.808285
| 0.783558
| 0.755941
| 0.745022
| 0.697174
| 0
| 0.004119
| 0.238133
| 5,098
| 144
| 100
| 35.402778
| 0.797631
| 0.013142
| 0
| 0.669643
| 0
| 0
| 0.136989
| 0.037886
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.089286
| false
| 0.169643
| 0.0625
| 0
| 0.205357
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
09b946d5fd09ddfef5a17466babf979ed0dc8f6c
| 8,032
|
py
|
Python
|
src/restApi/updates/api/views.py
|
nightwarriorftw/restApi
|
41bb572f55d27fbdb104a3624a15cbb959b92cf7
|
[
"MIT"
] | null | null | null |
src/restApi/updates/api/views.py
|
nightwarriorftw/restApi
|
41bb572f55d27fbdb104a3624a15cbb959b92cf7
|
[
"MIT"
] | 10
|
2020-06-05T20:12:44.000Z
|
2022-03-12T00:08:39.000Z
|
src/restApi/updates/api/views.py
|
nightwarriorftw/restApi
|
41bb572f55d27fbdb104a3624a15cbb959b92cf7
|
[
"MIT"
] | null | null | null |
import json
from updates.models import Updates as UpdatesModel
from updates.forms import UpdatesModelForm
from django.views.generic import View
from django.http import HttpResponse
from restApi.mixins import HttpResponseMixins
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from .mixins import CSRFExemptMixins
from .utils import is_json
class UpdatesApiListView(HttpResponseMixins, CSRFExemptMixins, View):
    """JSON collection endpoint for Updates: list (GET), create (POST),
    update (PUT) and delete (DELETE), with the target id passed in the body."""

    is_json = True

    def get_queryset(self):
        """Return the full Updates queryset (also cached on the view)."""
        qs = UpdatesModel.objects.all()
        self.queryset = qs
        return qs

    def get_object(self, id=None):
        """Return the single Updates instance with the given id, or None."""
        if id is None:
            return None
        qs = self.get_queryset().filter(id=id)
        if qs.count() == 1:
            return qs.first()
        else:
            return None

    def get(self, request, *args, **kwargs):
        """Serialize and return the whole collection."""
        qs = self.get_queryset()
        json_data = qs.serialize()
        return self.render_to_response(json_data)

    def post(self, request, *args, **kwargs):
        """Create a new entry from a JSON body; 202 on success."""
        valid_json = is_json(request.body)
        if not valid_json:
            json_data = json.dumps(
                {"message": "The entry is not in JSON format. Please enter valid JSON format."})
            return self.render_to_response(json_data, status=400)
        data = json.loads(request.body)
        form = UpdatesModelForm(data)
        if form.is_valid():
            obj = form.save(commit=True)
            json_data = obj.serialize()
            return self.render_to_response(json_data, status=202)
        if form.errors:
            json_data = json.dumps(form.errors)
            return self.render_to_response(json_data, status=404)
        json_data = json.dumps({
            "message": "Something went wrong please try again"
        })
        return self.render_to_response(json_data, status=403)

    def put(self, request, *args, **kwargs):
        """Partially update the entry whose id is given in the JSON body."""
        valid_json = is_json(request.body)
        if not valid_json:
            json_data = json.dumps(
                {"message": "The entry is not in JSON format. Please enter valid JSON format."})
            return self.render_to_response(json_data, status=400)
        passed_data = json.loads(request.body)
        passed_id = passed_data.get("id")
        obj = self.get_object(id=passed_id)
        if obj is None:
            json_data = json.dumps({"message": "The entry does not exist"})
            return self.render_to_response(json_data, status=404)
        old_data = json.loads(obj.serialize())
        # Defensive only: a missing id already yields the 404 above because
        # get_object(None) returns None.
        if passed_id is None:
            json_data = json.dumps(
                {"id": "ID is a must required field to do this operation"})
            return self.render_to_response(json_data, status=400)
        # Merge the incoming fields over the serialized state.
        for key, value in passed_data.items():
            old_data[key] = value
        form = UpdatesModelForm(old_data, instance=obj)
        if form.is_valid():
            # NOTE(review): commit=False means the change is never persisted —
            # confirm whether obj.save() should follow here.
            obj = form.save(commit=False)
            json_data = json.dumps(old_data)
            return self.render_to_response(json_data, status=201)
        if form.errors:
            json_data = json.dumps(form.errors)
            return self.render_to_response(json_data, status=400)
        json_data = json.dumps({"message": "Something is happening !!"})
        return self.render_to_response(json_data, status=404)

    def delete(self, request, *args, **kwargs):
        """Delete the entry whose id is given in the JSON body."""
        valid_json = is_json(request.body)
        if not valid_json:
            json_data = json.dumps(
                {"message": "The entry is not in JSON format. Please enter valid JSON format."})
            return self.render_to_response(json_data, status=400)
        passed_data = json.loads(request.body)
        passed_id = passed_data.get("id", None)
        if passed_id is None:
            json_data = json.dumps(
                {"id": "ID is a must required field to do this operation"})
            return self.render_to_response(json_data, status=400)
        obj = self.get_object(id=passed_id)
        if obj is None:
            json_data = json.dumps({"message": "Please send the correct ID."})
            return self.render_to_response(json_data, status=404)
        deleted_obj, updates_stats = obj.delete()
        # Use == rather than `is`: identity checks on int literals are unreliable.
        if deleted_obj == 1:
            # Format the id from the request; the old code formatted the
            # *builtin* `id` function into the message.
            json_data = json.dumps(
                {"message": "Entity with ID {id} deleted".format(id=passed_id)})
            return self.render_to_response(json_data, status=200)
        json_data = json.dumps({
            "message": "This is a delete request, but something went wrong !!!"
        })
        return self.render_to_response(json_data, status=403)
class UpdatesApiDetailView(HttpResponseMixins, CSRFExemptMixins, View):
    """JSON detail endpoint for a single Updates entry addressed by URL id."""

    is_json = True

    def get_object(self, id=None):
        """Return the Updates instance with the given id, or None if absent."""
        qs = UpdatesModel.objects.filter(id=id)
        if qs.count() == 1:
            return qs.first()
        else:
            return None

    def get(self, request, id, *args, **kwargs):
        """Serialize and return a single entry."""
        obj = self.get_object(id=id)
        if obj is None:
            json_data = json.dumps({"message": "The entry does not exist"})
            return self.render_to_response(json_data, status=400)
        json_data = obj.serialize()
        return self.render_to_response(json_data)

    def post(self, request, id, *args, **kwargs):
        """Creation is not supported on the detail endpoint."""
        json_data = json.dumps({
            "message": "This method is not allowed. Use /api/updates/ method."
        })
        return self.render_to_response(json_data, status=400)

    def put(self, request, id, *args, **kwargs):
        """Partially update the entry addressed by the URL id."""
        valid_json = is_json(request.body)
        if not valid_json:
            json_data = json.dumps(
                {"message": "The entry is not in JSON format. Please enter valid JSON format."})
            return self.render_to_response(json_data, status=400)
        obj = self.get_object(id=id)
        if obj is None:
            json_data = json.dumps({"message": "The entry does not exist"})
            return self.render_to_response(json_data, status=404)
        old_data = json.loads(obj.serialize())
        passed_data = json.loads(request.body)
        # Merge the incoming fields over the serialized state.
        for key, value in passed_data.items():
            old_data[key] = value
        form = UpdatesModelForm(old_data, instance=obj)
        if form.is_valid():
            # NOTE(review): commit=False means the change is never persisted —
            # confirm whether obj.save() should follow here.
            obj = form.save(commit=False)
            json_data = json.dumps(old_data)
            return self.render_to_response(json_data, status=201)
        if form.errors:
            json_data = json.dumps(form.errors)
            return self.render_to_response(json_data, status=400)
        json_data = json.dumps({"message": "Something is happening !!"})
        return self.render_to_response(json_data, status=404)

    def delete(self, request, id, *args, **kwargs):
        """Delete the entry addressed by the URL id."""
        obj = self.get_object(id=id)
        if obj is None:
            json_data = json.dumps({"message": "Please send the correct ID."})
            return self.render_to_response(json_data, status=404)
        deleted_obj, updates_stats = obj.delete()
        # Use == rather than `is`: identity checks on int literals are unreliable.
        if deleted_obj == 1:
            json_data = json.dumps(
                {"message": "Entity with ID {id} deleted".format(id=id)})
            return self.render_to_response(json_data, status=200)
        json_data = json.dumps({
            "message": "This is a delete request, but something went wrong !!!"
        })
        return self.render_to_response(json_data, status=403)
| 35.857143
| 96
| 0.611429
| 1,034
| 8,032
| 4.574468
| 0.11412
| 0.098097
| 0.098097
| 0.110359
| 0.840592
| 0.830233
| 0.805497
| 0.805497
| 0.775053
| 0.756871
| 0
| 0.014298
| 0.285981
| 8,032
| 223
| 97
| 36.017937
| 0.810462
| 0.050548
| 0
| 0.759259
| 0
| 0
| 0.119154
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067901
| false
| 0.080247
| 0.061728
| 0
| 0.358025
| 0.012346
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
1120fa03cbd554dd0014361c35c4dfab82cb5cfb
| 161
|
py
|
Python
|
workstation_app/admin.py
|
berserg2010/network_audit
|
62ba8dd0839c648547af75a402042758eb96f1bb
|
[
"Apache-2.0"
] | null | null | null |
workstation_app/admin.py
|
berserg2010/network_audit
|
62ba8dd0839c648547af75a402042758eb96f1bb
|
[
"Apache-2.0"
] | 4
|
2021-03-19T03:54:41.000Z
|
2021-06-10T19:19:09.000Z
|
workstation_app/admin.py
|
berserg2010/network_audit
|
62ba8dd0839c648547af75a402042758eb96f1bb
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from .models import list_class_models
# Register every model exported by list_class_models under one default
# ModelAdmin so they all appear in the Django admin.
@admin.register(*list_class_models)
class WorkstationAdmin(admin.ModelAdmin):
    """Default admin configuration shared by all registered models."""
    pass
| 17.888889
| 41
| 0.813665
| 21
| 161
| 6.047619
| 0.571429
| 0.141732
| 0.23622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118012
| 161
| 8
| 42
| 20.125
| 0.894366
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
113fe0d6753d0f71d7cc170a8d2efc712ba361e9
| 181
|
py
|
Python
|
adeskForgeWrapper/AFWExceptions.py
|
GastonBC/adeskForgeWrapper
|
ae80e2c353d6b817de736f0b4bcddc35c9b2916f
|
[
"MIT"
] | 5
|
2020-04-14T17:54:08.000Z
|
2020-12-15T17:00:17.000Z
|
adeskForgeWrapper/AFWExceptions.py
|
GastonBC/adeskForgeWrapper
|
ae80e2c353d6b817de736f0b4bcddc35c9b2916f
|
[
"MIT"
] | null | null | null |
adeskForgeWrapper/AFWExceptions.py
|
GastonBC/adeskForgeWrapper
|
ae80e2c353d6b817de736f0b4bcddc35c9b2916f
|
[
"MIT"
] | null | null | null |
'''Forge and AFW exceptions'''
class AFWError(Exception):
    '''Base class for AFW exceptions (errors raised by the wrapper itself)'''
class APIError(Exception):
    '''Base class for API response exceptions (errors reported by the Forge API)'''
| 25.857143
| 48
| 0.679558
| 21
| 181
| 5.857143
| 0.571429
| 0.211382
| 0.292683
| 0.341463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.187845
| 181
| 7
| 48
| 25.857143
| 0.836735
| 0.513812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
115f84b421fea51d9679183409a98782f9c885f4
| 1,022
|
py
|
Python
|
test/test_convert/test_temp_folder.py
|
etcher-be/EDLM
|
7b25c85252fd15c2c222b00271f7a32e335db704
|
[
"MIT"
] | null | null | null |
test/test_convert/test_temp_folder.py
|
etcher-be/EDLM
|
7b25c85252fd15c2c222b00271f7a32e335db704
|
[
"MIT"
] | 4
|
2020-03-24T16:53:26.000Z
|
2020-06-26T08:31:13.000Z
|
test/test_convert/test_temp_folder.py
|
etcher-be/EDLM
|
7b25c85252fd15c2c222b00271f7a32e335db704
|
[
"MIT"
] | null | null | null |
# coding=utf-8
import pytest
from edlm.convert._temp_folder import Context, TempDir
def test_temp_dir():
    """TempDir creates a directory on enter and removes it on exit."""
    context = Context()
    assert context.temp_dir is None
    with TempDir(context):
        created = context.temp_dir
        assert created.is_dir()
        assert created.exists()
    assert context.temp_dir is None
    assert not created.exists()
def test_temp_dir_keep():
    """With keep_temp_dir set, the directory survives exiting the context."""
    context = Context()
    context.keep_temp_dir = True
    assert context.temp_dir is None
    with TempDir(context):
        created = context.temp_dir
        assert created.is_dir()
        assert created.exists()
    assert context.temp_dir == created
    assert created.exists()
def test_temp_dir_with_error():
    """An exception inside the context leaves the directory in place."""
    context = Context()
    assert context.temp_dir is None
    with pytest.raises(TypeError):
        with TempDir(context):
            created = context.temp_dir
            assert created.is_dir()
            assert created.exists()
            raise TypeError()
    assert context.temp_dir == created
    assert created.exists()
| 22.711111
| 54
| 0.649706
| 151
| 1,022
| 4.139073
| 0.18543
| 0.3024
| 0.24
| 0.3072
| 0.7344
| 0.7344
| 0.7248
| 0.6592
| 0.6592
| 0.5856
| 0
| 0.001326
| 0.262231
| 1,022
| 44
| 55
| 23.227273
| 0.827586
| 0.011742
| 0
| 0.71875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.46875
| 1
| 0.09375
| false
| 0
| 0.0625
| 0
| 0.15625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
feff97a8484ceb2f7bfe2fef09bcf4e09c7a71e8
| 15,418
|
py
|
Python
|
model.py
|
suleymanaslan/animation-gan
|
97d5d0850a09a3ef30308408be034f6dfd145acd
|
[
"MIT"
] | null | null | null |
model.py
|
suleymanaslan/animation-gan
|
97d5d0850a09a3ef30308408be034f6dfd145acd
|
[
"MIT"
] | null | null | null |
model.py
|
suleymanaslan/animation-gan
|
97d5d0850a09a3ef30308408be034f6dfd145acd
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
def weights_init(m):
    """DCGAN-style weight initialisation, applied via module.apply().

    Conv* layers get weights ~ N(0, 0.02); BatchNorm* layers get
    weights ~ N(1, 0.02) and zero bias. Other modules are untouched.
    """
    classname = m.__class__.__name__
    # Substring match covers Conv2d/Conv3d/ConvTranspose2d/ConvTranspose3d etc.
    if 'Conv' in classname:
        nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif 'BatchNorm' in classname:
        nn.init.normal_(m.weight.data, 1.0, 0.02)
        nn.init.constant_(m.bias.data, 0)
class Generator2D(nn.Module):
    """DCGAN-style 2D generator.

    Maps a (N, latent_size, 1, 1) noise tensor to a Tanh-bounded
    (N, input_channels, 64, 64) output via five transposed convolutions.
    """

    def __init__(self, latent_size=100, input_channels=108, feature_map_size=64):
        super(Generator2D, self).__init__()
        # Channel progression from the latent vector down to the output image.
        net_channels = [latent_size,
                        feature_map_size*8,
                        feature_map_size*4,
                        feature_map_size*2,
                        feature_map_size*1,
                        input_channels]
        # Spatial size per stage: 1 -> 4 -> 8 -> 16 -> 32 -> 64.
        # (A commented-out duplicate of this network with bias=False was removed.)
        self.main = nn.Sequential(
            nn.ConvTranspose2d(net_channels[0], net_channels[1], 4, 1, 0), nn.BatchNorm2d(net_channels[1]), nn.ReLU(True),
            nn.ConvTranspose2d(net_channels[1], net_channels[2], 4, 2, 1), nn.BatchNorm2d(net_channels[2]), nn.ReLU(True),
            nn.ConvTranspose2d(net_channels[2], net_channels[3], 4, 2, 1), nn.BatchNorm2d(net_channels[3]), nn.ReLU(True),
            nn.ConvTranspose2d(net_channels[3], net_channels[4], 4, 2, 1), nn.BatchNorm2d(net_channels[4]), nn.ReLU(True),
            nn.ConvTranspose2d(net_channels[4], net_channels[5], 4, 2, 1), nn.Tanh()
        )

    def forward(self, input):
        """Run the generator on `input` of shape (N, latent_size, 1, 1)."""
        return self.main(input)
class Generator2D_10(nn.Module):
    """Ten-stage 2D generator producing 64x64 Tanh-bounded outputs."""

    # (kernel, stride, padding, output_padding) for each transposed-conv stage.
    _STAGES = [
        (2, 1, 0, 0),
        (3, 1, 0, 0),
        (3, 2, 1, 1),
        (3, 1, 1, 0),
        (3, 2, 1, 1),
        (3, 1, 1, 0),
        (5, 2, 2, 1),
        (5, 1, 2, 0),
        (7, 2, 3, 1),
        (7, 1, 3, 0),
    ]

    def __init__(self, latent_size=100, input_channels=108, feature_map_size=64):
        super(Generator2D_10, self).__init__()
        channels = [latent_size]
        channels += [feature_map_size * mult for mult in (18, 16, 14, 12, 10, 8, 6, 4, 2)]
        channels.append(input_channels)
        layers = []
        last = len(self._STAGES) - 1
        for idx, (kernel, stride, pad, out_pad) in enumerate(self._STAGES):
            layers.append(nn.ConvTranspose2d(channels[idx], channels[idx + 1], kernel,
                                             stride, padding=pad, output_padding=out_pad))
            if idx == last:
                layers.append(nn.Tanh())  # final activation squashes to [-1, 1]
            else:
                layers.append(nn.BatchNorm2d(channels[idx + 1]))
                layers.append(nn.ReLU(True))
        self.main = nn.Sequential(*layers)

    def forward(self, input):
        """Map (N, latent_size, 1, 1) input to (N, input_channels, 64, 64)."""
        return self.main(input)
class Generator3D(nn.Module):
    """3D generator: expands a latent vector over time and space with
    five transposed 3D convolutions, ending in Tanh."""

    def __init__(self, latent_size=100, input_channels=3, feature_map_size=64):
        super(Generator3D, self).__init__()
        c0 = latent_size
        c1, c2, c3, c4 = (feature_map_size * m for m in (8, 4, 2, 1))
        c5 = input_channels
        self.main = nn.Sequential(
            nn.ConvTranspose3d(c0, c1, (2, 4, 4), 1, 0, bias=False),
            nn.BatchNorm3d(c1), nn.ReLU(True),
            nn.ConvTranspose3d(c1, c2, (2, 4, 4), 2, (0, 1, 1), bias=False),
            nn.BatchNorm3d(c2), nn.ReLU(True),
            nn.ConvTranspose3d(c2, c3, (2, 4, 4), 2, (0, 1, 1), bias=False,
                               output_padding=(1, 0, 0)),
            nn.BatchNorm3d(c3), nn.ReLU(True),
            nn.ConvTranspose3d(c3, c4, (2, 4, 4), 2, (0, 1, 1), bias=False),
            nn.BatchNorm3d(c4), nn.ReLU(True),
            nn.ConvTranspose3d(c4, c5, (2, 4, 4), 2, (0, 1, 1), bias=False),
            nn.Tanh()
        )

    def forward(self, input):
        """Run the generator; output is Tanh-bounded in [-1, 1]."""
        return self.main(input)
class Discriminator2D(nn.Module):
    """DCGAN-style 2D discriminator for 64x64 inputs.

    With groups == 1 it is a plain discriminator ending in a single sigmoid
    score of shape (N, 1, 1, 1); with groups > 1 the convolutions are grouped
    and a final Linear layer merges the per-group scores into (N, 1).
    """

    def __init__(self, input_channels=108, output_channels=1, feature_map_size=64, groups=1):
        super(Discriminator2D, self).__init__()
        net_channels = [input_channels,
                        feature_map_size*1,
                        feature_map_size*2,
                        feature_map_size*4,
                        feature_map_size*8,
                        output_channels]
        if groups == 1:
            assert output_channels == 1
            # Spatial size per stage: 64 -> 32 -> 16 -> 8 -> 4 -> 1.
            # (A commented-out duplicate of this network with bias=False was removed.)
            self.main = nn.Sequential(
                nn.Conv2d(net_channels[0], net_channels[1], 4, 2, 1), nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(net_channels[1], net_channels[2], 4, 2, 1), nn.BatchNorm2d(net_channels[2]), nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(net_channels[2], net_channels[3], 4, 2, 1), nn.BatchNorm2d(net_channels[3]), nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(net_channels[3], net_channels[4], 4, 2, 1), nn.BatchNorm2d(net_channels[4]), nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(net_channels[4], net_channels[5], 4, 1, 0), nn.Sigmoid()
            )
        else:
            # Grouped variant: channel counts must be divisible by `groups`.
            self.main = nn.Sequential(
                nn.Conv2d(net_channels[0], net_channels[1], 4, 2, 1, groups=groups, bias=False), nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(net_channels[1], net_channels[2], 4, 2, 1, groups=groups, bias=False), nn.BatchNorm2d(net_channels[2]), nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(net_channels[2], net_channels[3], 4, 2, 1, groups=groups, bias=False), nn.BatchNorm2d(net_channels[3]), nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(net_channels[3], net_channels[4], 4, 2, 1, groups=groups, bias=False), nn.BatchNorm2d(net_channels[4]), nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(net_channels[4], net_channels[5], 4, 1, 0, groups=groups, bias=False), nn.BatchNorm2d(net_channels[5]), nn.LeakyReLU(0.2, inplace=True),
                nn.Flatten(), nn.Linear(net_channels[5], 1), nn.Sigmoid()
            )

    def forward(self, input):
        """Return real/fake probabilities for `input` of shape (N, C, 64, 64)."""
        return self.main(input)
class Discriminator2D_10(nn.Module):
    """Ten-convolution 2D discriminator producing a sigmoid score.

    Alternates stride-1 and stride-2 convolutions (four stride-2 layers give a
    16x spatial reduction) with kernel sizes shrinking 7 -> 5 -> 3, then two
    padding-0 convolutions collapse the remaining spatial extent.
    Every layer except the last is followed by BatchNorm + LeakyReLU(0.2).
    """

    def __init__(self, input_channels=108, output_channels=1, feature_map_size=64):
        """Build the layer stack.

        input_channels: channels of the input tensor (default 108 — presumably a
            stacked multi-frame/multi-feature input; TODO confirm against callers).
        output_channels: channels of the final score map.
        feature_map_size: base width; intermediate layers use multiples of it.
        """
        super(Discriminator2D_10, self).__init__()
        # Per-layer channel widths: input, then 1x..16x the base width, then output.
        net_channels = [input_channels,
                        feature_map_size*1,
                        feature_map_size*2,
                        feature_map_size*4,
                        feature_map_size*6,
                        feature_map_size*8,
                        feature_map_size*10,
                        feature_map_size*12,
                        feature_map_size*14,
                        feature_map_size*16,
                        output_channels]
        # Conv2d positional args below are (kernel_size, stride, padding).
        self.main = nn.Sequential(
            nn.Conv2d(net_channels[0], net_channels[1], 7, 1, 3), nn.BatchNorm2d(net_channels[1]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[1], net_channels[2], 7, 2, 3), nn.BatchNorm2d(net_channels[2]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[2], net_channels[3], 5, 1, 2), nn.BatchNorm2d(net_channels[3]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[3], net_channels[4], 5, 2, 2), nn.BatchNorm2d(net_channels[4]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[4], net_channels[5], 3, 1, 1), nn.BatchNorm2d(net_channels[5]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[5], net_channels[6], 3, 2, 1), nn.BatchNorm2d(net_channels[6]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[6], net_channels[7], 3, 1, 1), nn.BatchNorm2d(net_channels[7]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[7], net_channels[8], 3, 2, 1), nn.BatchNorm2d(net_channels[8]), nn.LeakyReLU(0.2, inplace=True),
            # Padding-0 convolutions: shrink the remaining spatial extent to 1x1.
            nn.Conv2d(net_channels[8], net_channels[9], 3, 1, 0), nn.BatchNorm2d(net_channels[9]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[9], net_channels[10], 2, 1, 0), nn.Sigmoid()
        )

    def forward(self, input):
        """Run the input through the convolutional stack; returns sigmoid scores."""
        return self.main(input)
class DiscriminatorTemporal(nn.Module):
    """Two-stage discriminator: a per-frame 2D CNN followed by temporal 1D convolutions.

    `forward` expects the input batch dimension to hold batch * sequence_length
    frames; per-frame feature vectors are regrouped into sequences of length
    `sequence_length` before the Conv1d stack scores each sequence.
    """

    def __init__(self, input_channels=3, output_channels=1, feature_map_size=32, batch_size=8, sequence_length=36):
        """Build the frame-wise and temporal stacks.

        input_channels: channels of each input frame.
        output_channels: channels of the final temporal score.
        feature_map_size: base width; the frame encoder ends at 16x this width.
        batch_size: kept for backward compatibility — forward() no longer
            depends on it (the batch dimension is inferred from the data).
        sequence_length: number of frames per sequence.
        """
        super(DiscriminatorTemporal, self).__init__()
        self.batch_size = batch_size
        self.sequence_length = sequence_length
        self.feature_map_size = feature_map_size
        net_channels = [input_channels,
                        feature_map_size*1,
                        feature_map_size*2,
                        feature_map_size*4,
                        feature_map_size*8,
                        feature_map_size*16,
                        output_channels]
        # Frame encoder; Conv2d positional args are (kernel_size, stride, padding).
        # The final padding-0 convolution plus Flatten yields one feature vector
        # of length feature_map_size*16 per frame.
        self.main = nn.Sequential(
            nn.Conv2d(net_channels[0], net_channels[1], 4, 2, 1), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[1], net_channels[2], 4, 2, 1), nn.BatchNorm2d(net_channels[2]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[2], net_channels[3], 4, 2, 1), nn.BatchNorm2d(net_channels[3]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[3], net_channels[4], 4, 2, 1), nn.BatchNorm2d(net_channels[4]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[4], net_channels[5], 4, 1, 0), nn.BatchNorm2d(net_channels[5]), nn.LeakyReLU(0.2, inplace=True),
            nn.Flatten()
        )
        # Temporal stack: 1D convolutions over the sequence axis, narrowing the
        # channel width back down and ending in a sigmoid score.
        self.temporal = nn.Sequential(
            nn.Conv1d(net_channels[5], net_channels[4], 4, 2, 1), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv1d(net_channels[4], net_channels[3], 4, 2, 1), nn.BatchNorm1d(net_channels[3]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv1d(net_channels[3], net_channels[2], 4, 2, 1), nn.BatchNorm1d(net_channels[2]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv1d(net_channels[2], net_channels[6], 4, 1, 0), nn.Sigmoid()
        )

    def forward(self, input):
        """Score sequences of frames.

        input: (batch * sequence_length, C, H, W) frames.
        Returns the temporal sigmoid scores, one per sequence.
        """
        conv_out = self.main(input)
        # Bug fix: infer the batch dimension with -1 instead of the fixed
        # self.batch_size, so a partial (e.g. final) batch no longer crashes
        # the reshape. permute puts channels before the sequence axis for Conv1d.
        conv_out = conv_out.reshape(-1, self.sequence_length, self.feature_map_size*16).permute(0, 2, 1)
        temporal_out = self.temporal(conv_out)
        return temporal_out
class DiscriminatorSheet(nn.Module):
    """2D discriminator whose first two layers downsample width faster than height.

    The first two convolutions use asymmetric stride (2, 3) — presumably the
    expected input is wider than it is tall ("sheet"-shaped); TODO confirm
    against callers. All convolutions use padding 0 and bias=False, and the
    final 2x2 convolution plus sigmoid emits the score.
    """

    def __init__(self, input_channels=3, output_channels=1, feature_map_size=64):
        """Build the layer stack.

        input_channels: channels of the input tensor.
        output_channels: channels of the final score.
        feature_map_size: base width; intermediate layers use 1x..32x this value.
        """
        super(DiscriminatorSheet, self).__init__()
        # Per-layer channel widths: input, then powers-of-two multiples, then output.
        net_channels = [input_channels,
                        feature_map_size*1,
                        feature_map_size*2,
                        feature_map_size*4,
                        feature_map_size*8,
                        feature_map_size*16,
                        feature_map_size*32,
                        output_channels]
        # Conv2d positional args below are (kernel_size, stride, padding).
        self.main = nn.Sequential(
            nn.Conv2d(net_channels[0], net_channels[1], 5, (2, 3), 0, bias=False), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[1], net_channels[2], 5, (2, 3), 0, bias=False), nn.BatchNorm2d(net_channels[2]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[2], net_channels[3], 5, 2, 0, bias=False), nn.BatchNorm2d(net_channels[3]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[3], net_channels[4], 5, 2, 0, bias=False), nn.BatchNorm2d(net_channels[4]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[4], net_channels[5], 3, 2, 0, bias=False), nn.BatchNorm2d(net_channels[5]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[5], net_channels[6], 3, 2, 0, bias=False), nn.BatchNorm2d(net_channels[6]), nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(net_channels[6], net_channels[7], 2, 1, 0, bias=False), nn.Sigmoid()
        )

    def forward(self, input):
        """Run the input through the convolutional stack; returns sigmoid scores."""
        return self.main(input)
class Discriminator3D(nn.Module):
    """3D convolutional discriminator ending in a sigmoid score.

    Each Conv3d uses a (2, 4, 4) kernel; four stride-2 stages halve every
    dimension, and a final stride-1, padding-0 convolution collapses the
    remaining extent before the sigmoid. All convolutions are bias-free.
    """

    def __init__(self, input_channels=3, output_channels=1, feature_map_size=64):
        """Build the layer stack.

        input_channels: channels of the 5D input (N, C, D, H, W).
        output_channels: channels of the final score.
        feature_map_size: base width; intermediate layers use 1x, 2x, 4x, 8x of it.
        """
        super(Discriminator3D, self).__init__()
        widths = [input_channels,
                  feature_map_size * 1,
                  feature_map_size * 2,
                  feature_map_size * 4,
                  feature_map_size * 8,
                  output_channels]
        kernel = (2, 4, 4)
        pad = (0, 1, 1)
        # First downsampling stage has no batch norm.
        layers = [nn.Conv3d(widths[0], widths[1], kernel, 2, pad, bias=False),
                  nn.LeakyReLU(0.2, inplace=True)]
        # Three more stride-2 stages, each with batch norm.
        for i in (1, 2, 3):
            layers += [nn.Conv3d(widths[i], widths[i + 1], kernel, 2, pad, bias=False),
                       nn.BatchNorm3d(widths[i + 1]),
                       nn.LeakyReLU(0.2, inplace=True)]
        # Final stride-1, padding-0 convolution plus sigmoid.
        layers += [nn.Conv3d(widths[4], widths[5], kernel, 1, 0, bias=False),
                   nn.Sigmoid()]
        self.main = nn.Sequential(*layers)

    def forward(self, input):
        """Run the input through the convolutional stack; returns sigmoid scores."""
        return self.main(input)
| 62.169355
| 176
| 0.604034
| 2,168
| 15,418
| 4.085793
| 0.041974
| 0.254572
| 0.088508
| 0.121924
| 0.921088
| 0.909009
| 0.880673
| 0.784601
| 0.772861
| 0.75604
| 0
| 0.070454
| 0.2488
| 15,418
| 247
| 177
| 62.421053
| 0.694353
| 0.089506
| 0
| 0.472637
| 0
| 0
| 0.000928
| 0
| 0
| 0
| 0
| 0
| 0.004975
| 1
| 0.084577
| false
| 0
| 0.004975
| 0.034826
| 0.169154
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
28c834af99c46295e3e514f7543fac9e4ed1b274
| 5,653
|
py
|
Python
|
ui/Pytest/test_CounterfactualInterfaceModel.py
|
MoisesHenr/OCEAN
|
e99c853893adc89652794ace62fcc8ffa78aa7ac
|
[
"MIT"
] | 15
|
2021-06-15T13:48:03.000Z
|
2022-01-26T13:51:46.000Z
|
ui/Pytest/test_CounterfactualInterfaceModel.py
|
MoisesHenr/OCEAN
|
e99c853893adc89652794ace62fcc8ffa78aa7ac
|
[
"MIT"
] | 1
|
2021-07-04T02:58:29.000Z
|
2021-07-04T02:58:29.000Z
|
ui/Pytest/test_CounterfactualInterfaceModel.py
|
MoisesHenr/OCEAN
|
e99c853893adc89652794ace62fcc8ffa78aa7ac
|
[
"MIT"
] | 2
|
2021-06-21T20:44:01.000Z
|
2021-06-23T11:10:56.000Z
|
# Author: Moises Henrique Pereira
# this module holds the tests of the model's functions
import pytest
from ui.mainTest import StaticObjects
# the function openChosenDataset expects a str type
# sending another type as parameter would raise an AssertionError
@pytest.mark.parametrize('chosenDataset', [1, 2.9, ['str'], False, ('t1', 't2'), None])
def test_CIM_openChosenDataset_wrong_parameter(chosenDataset):
    """openChosenDataset must reject non-string arguments with AssertionError."""
    # Build the model outside the raises-block so a failure during setup
    # cannot be mistaken for the assertion we are actually testing.
    counterfactualInterfaceModel = StaticObjects.staticCounterfactualInterfaceModel()
    with pytest.raises(AssertionError):
        counterfactualInterfaceModel.openChosenDataset(chosenDataset)
# the function openChosenDataset expects a non-empty str
# sending an empty str as parameter would raise an AssertionError
def test_CIM_openChosenDataset_empty_str():
    """openChosenDataset must reject an empty dataset name with AssertionError."""
    # Setup happens outside the raises-block so a setup failure can't
    # masquerade as the expected AssertionError.
    counterfactualInterfaceModel = StaticObjects.staticCounterfactualInterfaceModel()
    with pytest.raises(AssertionError):
        counterfactualInterfaceModel.openChosenDataset('')
# the function openChosenDataset expects a non-empty str
# a valid str would not raise an AssertionError
def test_CIM_openChosenDataset_right_parameter():
    """A valid dataset name is accepted without raising."""
    model = StaticObjects.staticCounterfactualInterfaceModel()
    model.openChosenDataset('German-Credit')
# the function transformDataPoint expects a non-empty array
# sending None as data point would raise an AssertionError
def test_CIM_transformDataPoint_none_parameter():
    """transformDataPoint must reject None with AssertionError."""
    # Setup outside the raises-block: only the call under test may raise.
    counterfactualInterfaceModel = StaticObjects.staticCounterfactualInterfaceModel()
    with pytest.raises(AssertionError):
        counterfactualInterfaceModel.transformDataPoint(None)
# the function transformDataPoint expects a non-empty array of valid length
# sending an array of invalid length would raise an AssertionError
def test_CIM_transformDataPoint_wrong_length():
    """transformDataPoint must reject a data point of invalid length."""
    # All setup (construction and dataset loading) happens outside the
    # raises-block, so only the call under test can satisfy it.
    counterfactualInterfaceModel = StaticObjects.staticCounterfactualInterfaceModel()
    counterfactualInterfaceModel.openChosenDataset('German-Credit')
    with pytest.raises(AssertionError):
        # One field short of the valid 9-element German-Credit point.
        counterfactualInterfaceModel.transformDataPoint([67,'male',2,'own',0,1,1169,6])
# the function transformDataPoint expects a non-empty array of valid length
# such input would not raise an AssertionError
def test_CIM_transformDataPoint_right_parameter():
    """A complete, valid data point is transformed without raising."""
    model = StaticObjects.staticCounterfactualInterfaceModel()
    model.openChosenDataset('German-Credit')
    model.transformDataPoint([67,'male',2,'own',0,1,1169,6,'radio/TV'])
# the function transformSingleNumericalValue expects a string feature name and a number
# sending another type as feature would raise an AssertionError
@pytest.mark.parametrize('feature', [1, 2.9, False, ('t1', 't2'), None])
def test_CIM_transformSingleNumericalValue_feature_wrong_type(feature):
    """transformSingleNumericalValue must reject a non-string feature name."""
    # Setup outside the raises-block: only the call under test may raise.
    counterfactualInterfaceModel = StaticObjects.staticCounterfactualInterfaceModel()
    counterfactualInterfaceModel.openChosenDataset('German-Credit')
    with pytest.raises(AssertionError):
        counterfactualInterfaceModel.transformSingleNumericalValue(feature, 67)
# the function transformSingleNumericalValue expects a string feature name and a number
# sending None as value would raise an AssertionError
def test_CIM_transformSingleNumericalValue_value_none_parameter():
    """transformSingleNumericalValue must reject a None value."""
    # Setup outside the raises-block: only the call under test may raise.
    counterfactualInterfaceModel = StaticObjects.staticCounterfactualInterfaceModel()
    counterfactualInterfaceModel.openChosenDataset('German-Credit')
    with pytest.raises(AssertionError):
        counterfactualInterfaceModel.transformSingleNumericalValue('Age', None)
# the function transformSingleNumericalValue expects a string feature name and a number
# sending these would not raise an AssertionError
def test_CIM_transformSingleNumericalValue_right_parameters():
    """A string feature name with a numeric value is accepted without raising."""
    model = StaticObjects.staticCounterfactualInterfaceModel()
    model.openChosenDataset('German-Credit')
    model.transformSingleNumericalValue('Age', 67)
# invertTransformedDataPoint(self, transformedDataPoint)
# the function invertTransformedDataPoint expects a non-empty array
# sending None as data point would raise an AssertionError
def test_CIM_invertTransformedDataPoint_none_parameter():
    """invertTransformedDataPoint must reject None with AssertionError."""
    # Setup outside the raises-block: only the call under test may raise.
    counterfactualInterfaceModel = StaticObjects.staticCounterfactualInterfaceModel()
    with pytest.raises(AssertionError):
        counterfactualInterfaceModel.invertTransformedDataPoint(None)
# the function invertTransformedDataPoint expects a non-empty array of valid length
# sending an array of invalid length would raise an AssertionError
def test_CIM_invertTransformedDataPoint_wrong_length():
    """invertTransformedDataPoint must reject a transformed point of invalid length."""
    # Setup outside the raises-block: only the call under test may raise.
    counterfactualInterfaceModel = StaticObjects.staticCounterfactualInterfaceModel()
    counterfactualInterfaceModel.openChosenDataset('German-Credit')
    with pytest.raises(AssertionError):
        # Bug fix: this test previously called transformDataPoint, so the
        # function named in the test was never exercised. The 18-element
        # array is one short of the valid 19-element transformed point
        # used by the right-parameter test below.
        counterfactualInterfaceModel.invertTransformedDataPoint([0.14285714285714285, 1.0, 0.0, 0.6666666666666666, 0.09631891548633573, 0.0, 0.0, 1.0, -0.0, 1.0, -0.0, 1.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0])
# the function invertTransformedDataPoint expects a non-empty array of valid length
# such input would not raise an AssertionError
def test_CIM_invertTransformedDataPoint_right_parameter():
    """A transformed point of valid length is inverted without raising."""
    model = StaticObjects.staticCounterfactualInterfaceModel()
    model.openChosenDataset('German-Credit')
    model.invertTransformedDataPoint([0.14285714285714285, 1.0, 0.0, 0.6666666666666666, 0.09631891548633573, 0.0, 0.0, 1.0, -0.0, 1.0, -0.0, 1.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0])
| 58.278351
| 212
| 0.808067
| 565
| 5,653
| 7.99646
| 0.153982
| 0.020363
| 0.023904
| 0.023019
| 0.872731
| 0.84772
| 0.828243
| 0.780434
| 0.765826
| 0.615759
| 0
| 0.040513
| 0.11799
| 5,653
| 96
| 213
| 58.885417
| 0.865624
| 0.292234
| 0
| 0.509091
| 0
| 0
| 0.041027
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 1
| 0.218182
| false
| 0
| 0.036364
| 0
| 0.254545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e90d4c8b29827140fb23abfd7c59a4682d0203b5
| 478
|
py
|
Python
|
omoide/migration_engine/operations/__init__.py
|
TaXeH/Omoide
|
8ccc9d47e802433bb2de21ff930e6630658cd5e3
|
[
"MIT"
] | null | null | null |
omoide/migration_engine/operations/__init__.py
|
TaXeH/Omoide
|
8ccc9d47e802433bb2de21ff930e6630658cd5e3
|
[
"MIT"
] | null | null | null |
omoide/migration_engine/operations/__init__.py
|
TaXeH/Omoide
|
8ccc9d47e802433bb2de21ff930e6630658cd5e3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from omoide.migration_engine.operations import freeze
from omoide.migration_engine.operations import make_migrations
from omoide.migration_engine.operations import make_relocations
from omoide.migration_engine.operations import migrate
from omoide.migration_engine.operations import relocate
from omoide.migration_engine.operations import show_tree
from omoide.migration_engine.operations import sync
from omoide.migration_engine.operations import unite
| 47.8
| 63
| 0.870293
| 62
| 478
| 6.532258
| 0.306452
| 0.197531
| 0.375309
| 0.493827
| 0.82963
| 0.82963
| 0.222222
| 0
| 0
| 0
| 0
| 0.002268
| 0.077406
| 478
| 9
| 64
| 53.111111
| 0.9161
| 0.043933
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e91b8a6127a268f3b74fd3c4b6b5d04e77be165e
| 164
|
py
|
Python
|
roipoly/__init__.py
|
broald1/roipoly.py
|
1dba595342eb1557083557c7bcbeba6f479af328
|
[
"Apache-2.0"
] | 110
|
2015-04-11T00:38:25.000Z
|
2022-03-28T14:22:33.000Z
|
roipoly/__init__.py
|
broald1/roipoly.py
|
1dba595342eb1557083557c7bcbeba6f479af328
|
[
"Apache-2.0"
] | 31
|
2015-06-30T09:58:20.000Z
|
2021-11-22T10:01:31.000Z
|
roipoly/__init__.py
|
broald1/roipoly.py
|
1dba595342eb1557083557c7bcbeba6f479af328
|
[
"Apache-2.0"
] | 44
|
2015-03-20T16:07:51.000Z
|
2022-03-23T12:27:45.000Z
|
from .roipoly import RoiPoly, MultiRoi # noqa
from .roipoly import roipoly # noqa / for compatibility with old versions
from .version import __version__ # noqa
| 32.8
| 74
| 0.77439
| 21
| 164
| 5.857143
| 0.52381
| 0.178862
| 0.276423
| 0.390244
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176829
| 164
| 4
| 75
| 41
| 0.911111
| 0.317073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e91dbb58fa3cfcc724b6594005752be6fa3f9317
| 201
|
py
|
Python
|
app_website/core_website/views.py
|
pdessauw/core-framework-demo
|
27d2c1704b92e40220e062c30a4f40ff1f9ce674
|
[
"MIT"
] | null | null | null |
app_website/core_website/views.py
|
pdessauw/core-framework-demo
|
27d2c1704b92e40220e062c30a4f40ff1f9ce674
|
[
"MIT"
] | null | null | null |
app_website/core_website/views.py
|
pdessauw/core-framework-demo
|
27d2c1704b92e40220e062c30a4f40ff1f9ce674
|
[
"MIT"
] | null | null | null |
from core_main.utils import render
def home_view(request):
    """Render the website home page template with an empty context."""
    return render(request, "core_website/home.html", {})
def help_view(request):
    """Render the website help page template with an empty context."""
    return render(request, "core_website/help.html", {})
| 20.1
| 56
| 0.726368
| 28
| 201
| 5.035714
| 0.5
| 0.156028
| 0.241135
| 0.326241
| 0.58156
| 0.58156
| 0.58156
| 0
| 0
| 0
| 0
| 0
| 0.139303
| 201
| 9
| 57
| 22.333333
| 0.815029
| 0
| 0
| 0
| 0
| 0
| 0.218905
| 0.218905
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
3ad4667605d1fd57f3c13006e73e14df7efe5ec7
| 2,570
|
py
|
Python
|
emailage/validation.py
|
bluefish6/Emailage_Python
|
40be83eeb940ba995eaad789f493caf64dd5f48d
|
[
"MIT"
] | 9
|
2017-10-25T22:43:23.000Z
|
2021-07-20T18:22:29.000Z
|
emailage/validation.py
|
bluefish6/Emailage_Python
|
40be83eeb940ba995eaad789f493caf64dd5f48d
|
[
"MIT"
] | 15
|
2018-02-20T21:57:39.000Z
|
2020-08-19T17:30:36.000Z
|
emailage/validation.py
|
bluefish6/Emailage_Python
|
40be83eeb940ba995eaad789f493caf64dd5f48d
|
[
"MIT"
] | 9
|
2017-04-04T07:46:12.000Z
|
2020-02-20T14:39:57.000Z
|
"""A module for arguments validation"""
import re
class Messages(object):
    """Error-message templates used by the validation helpers."""
    # Placeholders: (max code, description of the code set, given value).
    FRAUD_CODE_RANGE_FORMAT = "fraud_code must be an integer from 1 to {} corresponding to {}. {} is given."
    # Placeholders: (allowed flags, given value).
    FLAG_NOT_ALLOWED_FORMAT = "flag must be one of {}. {} is given."
def assert_email(email):
    """Raise ValueError unless *email* looks like an email address.

    Accepted shape: local@domain where the domain contains at least one dot
    and no part contains whitespace or an extra '@'.
    """
    pattern = r'^[^@\s]+@([^@\s]+\.)+[^@\s]+$'
    if re.match(pattern, email) is None:
        raise ValueError('{} is not a valid email address.'.format(email))
def assert_ip(ip):
    """Raise ValueError unless *ip* is a syntactically valid IPv4 or IPv6 address."""
    # Dotted-quad IPv4 with each octet constrained to 0-255 by the alternation.
    # NOTE(review): the first octet's alternation has no bare '0' branch, so
    # addresses starting with 0 appear to be rejected — confirm this is intended.
    ipv4_re = r'^([3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2})((\.([3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0)){3})$'
    # IPv6: every legal '::' compression position is spelled out as its own
    # alternative, each optionally ending in an embedded dotted-quad IPv4 tail.
    ipv6_re = r'^(?:(?:[0-9A-Fa-f]{1,4}:){6}' + \
        '(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}' + \
        '(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}' + \
        '(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}' + \
        '(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}' + \
        '(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}' + \
        '(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::' + \
        '(?:[0-9A-Fa-f]{1,4}:){3}' + \
        '(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}' + \
        '(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::' + \
        '(?:[0-9A-Fa-f]{1,4}:){2}' + \
        '(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}' + \
        '(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::' + \
        '[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}' + \
        '(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::' + \
        '(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}' + \
        '(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::' + \
        '[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)$'
    # An address is valid if either family's pattern matches.
    if not re.match(ipv4_re, ip) and not re.match(ipv6_re, ip):
        raise ValueError('{} is not a valid IP address.'.format(ip))
| 65.897436
| 134
| 0.377043
| 598
| 2,570
| 1.598662
| 0.093645
| 0.117155
| 0.177824
| 0.213389
| 0.664226
| 0.664226
| 0.609833
| 0.609833
| 0.609833
| 0.609833
| 0
| 0.2076
| 0.160311
| 2,570
| 38
| 135
| 67.631579
| 0.235403
| 0.01284
| 0
| 0.206897
| 0
| 0.551724
| 0.713157
| 0.644804
| 0
| 0
| 0
| 0
| 0.068966
| 1
| 0.068966
| false
| 0
| 0.034483
| 0
| 0.206897
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c941be08ab808f9b5d49465a036296fae7803528
| 119
|
py
|
Python
|
keg_mail/__init__.py
|
level12/Keg-Mail
|
67976e203830b38569570efc18fc412a72dfb66f
|
[
"BSD-3-Clause"
] | null | null | null |
keg_mail/__init__.py
|
level12/Keg-Mail
|
67976e203830b38569570efc18fc412a72dfb66f
|
[
"BSD-3-Clause"
] | 6
|
2020-03-10T21:12:53.000Z
|
2021-02-02T19:35:11.000Z
|
keg_mail/__init__.py
|
level12/Keg-Mail
|
67976e203830b38569570efc18fc412a72dfb66f
|
[
"BSD-3-Clause"
] | null | null | null |
from .content import * # noqa
from .plugin import * # noqa
from .views import * # noqa
from .model import * # noqa
| 23.8
| 30
| 0.663866
| 16
| 119
| 4.9375
| 0.4375
| 0.506329
| 0.531646
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 119
| 4
| 31
| 29.75
| 0.868132
| 0.159664
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c986a4ed808bc790c5e01338c8f8e12e16ca1851
| 3,166
|
py
|
Python
|
sk_typing/tree.py
|
thomasjpfan/sk_typing
|
e6aacfedbce44d7748cf7c49cd2b949952f2e427
|
[
"MIT"
] | 1
|
2021-02-19T20:57:36.000Z
|
2021-02-19T20:57:36.000Z
|
sk_typing/tree.py
|
thomasjpfan/sk_typing
|
e6aacfedbce44d7748cf7c49cd2b949952f2e427
|
[
"MIT"
] | null | null | null |
sk_typing/tree.py
|
thomasjpfan/sk_typing
|
e6aacfedbce44d7748cf7c49cd2b949952f2e427
|
[
"MIT"
] | null | null | null |
from typing import Optional
from typing import Union
from .typing import Literal
from .typing import RandomStateType
class DecisionTreeClassifier:
    """Typed constructor stub mirroring sklearn.tree.DecisionTreeClassifier.

    The body is intentionally `...` — only the annotated signature matters.
    """

    def __init__(
        self,
        criterion: Literal["gini", "entropy"] = "gini",
        splitter: Literal["best", "random"] = "best",
        max_depth: Optional[int] = None,
        min_samples_split: Union[int, float] = 2,
        min_samples_leaf: Union[int, float] = 1,
        min_weight_fraction_leaf: float = 0.0,
        # Fix: the default is None, so None belongs in the annotation —
        # this also matches the three sibling classes in this module.
        max_features: Union[int, float, Literal["auto", "sqrt", "log2"], None] = None,
        random_state: RandomStateType = None,
        max_leaf_nodes: Optional[int] = None,
        min_impurity_decrease: float = 0.0,
        min_impurity_split: Optional[float] = None,
        class_weight: Union[dict, list, Literal["balanced"], None] = None,
        presort: Union[bool, Literal["deprecated"]] = "deprecated",
        ccp_alpha: float = 0.0,
    ):
        ...
class DecisionTreeRegressor:
    """Typed constructor stub mirroring sklearn.tree.DecisionTreeRegressor.

    The body is intentionally `...` — only the annotated signature matters.
    """

    def __init__(
        self,
        criterion: Literal["mse", "friedman_mse", "mae"] = "mse",
        splitter: Literal["best", "random"] = "best",
        max_depth: Optional[int] = None,
        min_samples_split: Union[int, float] = 2,
        min_samples_leaf: Union[int, float] = 1,
        min_weight_fraction_leaf: float = 0.0,
        max_features: Union[int, float, Literal["auto", "sqrt", "log2"], None] = None,
        random_state: RandomStateType = None,
        max_leaf_nodes: Optional[int] = None,
        min_impurity_decrease: float = 0.0,
        min_impurity_split: Optional[float] = None,
        presort: Union[bool, Literal["deprecated"]] = "deprecated",
        ccp_alpha: float = 0.0,
    ):
        ...
class ExtraTreeClassifier:
    """Typed constructor stub mirroring sklearn.tree.ExtraTreeClassifier.

    Note: unlike DecisionTreeClassifier, the splitter defaults to "random"
    and max_features defaults to "auto". The body is intentionally `...`.
    """

    def __init__(
        self,
        criterion: Literal["gini", "entropy"] = "gini",
        splitter: Literal["random", "best"] = "random",
        max_depth: Optional[int] = None,
        min_samples_split: Union[int, float] = 2,
        min_samples_leaf: Union[int, float] = 1,
        min_weight_fraction_leaf: float = 0.0,
        max_features: Union[int, float, Literal["auto", "sqrt", "log2"], None] = "auto",
        random_state: RandomStateType = None,
        max_leaf_nodes: Optional[int] = None,
        min_impurity_decrease: float = 0.0,
        min_impurity_split: Optional[float] = None,
        class_weight: Union[dict, list, Literal["balanced"], None] = None,
        ccp_alpha: float = 0.0,
    ):
        ...
class ExtraTreeRegressor:
    """Typed constructor stub mirroring sklearn.tree.ExtraTreeRegressor.

    The body is intentionally `...` — only the annotated signature matters.
    """

    def __init__(
        self,
        # Fix: the Literal listed "mse" twice; the third option is "mae",
        # matching DecisionTreeRegressor above and the sklearn API.
        criterion: Literal["mse", "friedman_mse", "mae"] = "mse",
        splitter: Literal["random", "best"] = "random",
        max_depth: Optional[int] = None,
        min_samples_split: Union[int, float] = 2,
        min_samples_leaf: Union[int, float] = 1,
        min_weight_fraction_leaf: float = 0.0,
        max_features: Union[int, float, Literal["auto", "sqrt", "log2"], None] = "auto",
        random_state: RandomStateType = None,
        min_impurity_decrease: float = 0.0,
        min_impurity_split: Optional[float] = None,
        max_leaf_nodes: Optional[int] = None,
        ccp_alpha: float = 0.0,
    ):
        ...
| 36.813953
| 88
| 0.606759
| 365
| 3,166
| 5.021918
| 0.147945
| 0.052373
| 0.085106
| 0.06874
| 0.893071
| 0.893071
| 0.873977
| 0.859247
| 0.814512
| 0.814512
| 0
| 0.015332
| 0.25837
| 3,166
| 85
| 89
| 37.247059
| 0.765332
| 0
| 0
| 0.842105
| 0
| 0
| 0.077069
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.052632
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3192c19aff99e5c1f2293fd9687196e7855260d
| 346
|
py
|
Python
|
app/main/routes.py
|
Icoqu/SecretShare
|
1b0c25c3cc64803157499d2c62870254d32b3022
|
[
"MIT"
] | null | null | null |
app/main/routes.py
|
Icoqu/SecretShare
|
1b0c25c3cc64803157499d2c62870254d32b3022
|
[
"MIT"
] | 206
|
2020-05-23T18:44:20.000Z
|
2022-03-31T19:11:25.000Z
|
app/main/routes.py
|
Icoqu/SecretShare
|
1b0c25c3cc64803157499d2c62870254d32b3022
|
[
"MIT"
] | null | null | null |
from flask import redirect, url_for, render_template
from app.main import bp
from flask_babel import _
@bp.route("/help/")
def help_section():
return render_template('static/about.html')
@bp.route("/password-generator/")
def password_generator():
return render_template('static/password_generator.html', title=_('Password generator'))
| 26.615385
| 91
| 0.765896
| 46
| 346
| 5.543478
| 0.5
| 0.266667
| 0.156863
| 0.203922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106936
| 346
| 12
| 92
| 28.833333
| 0.825243
| 0
| 0
| 0
| 0
| 0
| 0.263006
| 0.086705
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| true
| 0.333333
| 0.333333
| 0.222222
| 0.777778
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
|
0
| 7
|
6e629e3aae2a89d5648f751c4ee9db2cf93be645
| 773
|
py
|
Python
|
rastervision2/pytorch_learner/__init__.py
|
alexchunet/raster-vision
|
76e2965557fc8380e2ffc4aa7ab1f5dc45f79033
|
[
"Apache-2.0"
] | 1
|
2020-05-27T07:07:58.000Z
|
2020-05-27T07:07:58.000Z
|
rastervision2/pytorch_learner/__init__.py
|
alexchunet/raster-vision
|
76e2965557fc8380e2ffc4aa7ab1f5dc45f79033
|
[
"Apache-2.0"
] | null | null | null |
rastervision2/pytorch_learner/__init__.py
|
alexchunet/raster-vision
|
76e2965557fc8380e2ffc4aa7ab1f5dc45f79033
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
import rastervision2.pipeline
from rastervision2.pytorch_learner.learner_config import *
from rastervision2.pytorch_learner.learner import *
from rastervision2.pytorch_learner.classification_learner_config import *
from rastervision2.pytorch_learner.classification_learner import *
from rastervision2.pytorch_learner.regression_learner_config import *
from rastervision2.pytorch_learner.regression_learner import *
from rastervision2.pytorch_learner.semantic_segmentation_learner_config import *
from rastervision2.pytorch_learner.semantic_segmentation_learner import *
from rastervision2.pytorch_learner.object_detection_learner_config import *
from rastervision2.pytorch_learner.object_detection_learner import *
def register_plugin(registry):
    """Plugin entry point; this package registers nothing with *registry*."""
    pass
| 42.944444
| 80
| 0.882277
| 87
| 773
| 7.517241
| 0.229885
| 0.259939
| 0.366972
| 0.474006
| 0.889908
| 0.842508
| 0.831804
| 0.376147
| 0
| 0
| 0
| 0.016713
| 0.071151
| 773
| 17
| 81
| 45.470588
| 0.89415
| 0.015524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0.076923
| 0.846154
| 0
| 0.923077
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 9
|
6ebeef87c67bcd60b0f5e8f43c4b8b2f2516d681
| 45
|
py
|
Python
|
bumps/mystic/examples/simple.py
|
cyankaet/bumps
|
427d077fd95f2d9a09eeb8677d045547061cff42
|
[
"MIT"
] | 44
|
2015-03-28T06:48:43.000Z
|
2022-01-09T11:29:00.000Z
|
bumps/mystic/examples/simple.py
|
cyankaet/bumps
|
427d077fd95f2d9a09eeb8677d045547061cff42
|
[
"MIT"
] | 68
|
2015-08-21T11:28:54.000Z
|
2022-03-30T22:14:13.000Z
|
bumps/mystic/examples/simple.py
|
cyankaet/bumps
|
427d077fd95f2d9a09eeb8677d045547061cff42
|
[
"MIT"
] | 27
|
2015-06-22T19:25:27.000Z
|
2021-06-15T18:20:06.000Z
|
def f(x):
    """Paraboloid objective with minimum value 0 at x = (3, 5)."""
    dx = x[0] - 3
    dy = x[1] - 5
    return dx ** 2 + dy ** 2
| 15
| 34
| 0.422222
| 12
| 45
| 1.583333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 0.177778
| 45
| 2
| 35
| 22.5
| 0.351351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
6eea73fb7ed5ee211acca8255f8350e9891c3f7d
| 107
|
py
|
Python
|
legacy/src/__init__.py
|
zz090923610/OhMyLifeRecorder
|
1164f6cb57a56d315d79e071b7be923a90393bed
|
[
"MIT"
] | 2
|
2016-06-22T14:27:11.000Z
|
2016-06-22T14:27:18.000Z
|
legacy/src/__init__.py
|
zz090923610/OhMyLifeRecorder
|
1164f6cb57a56d315d79e071b7be923a90393bed
|
[
"MIT"
] | null | null | null |
legacy/src/__init__.py
|
zz090923610/OhMyLifeRecorder
|
1164f6cb57a56d315d79e071b7be923a90393bed
|
[
"MIT"
] | 1
|
2018-03-24T02:52:17.000Z
|
2018-03-24T02:52:17.000Z
|
from .alias_hdl import *
from .config_hdl import *
from .job_hdl import *
from .markdown_generator import *
| 26.75
| 33
| 0.785047
| 16
| 107
| 5
| 0.5
| 0.3375
| 0.4875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140187
| 107
| 4
| 33
| 26.75
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
42f954779785cc87b6c674ff85d321bee0e44aec
| 2,211
|
py
|
Python
|
tests/integration/unit_test/test_unit_test_java8_al2.py
|
aahung/aws-sam-cli-app-templates
|
fb44b0030d124e53ee4db42bc95240081e4dbbd8
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/unit_test/test_unit_test_java8_al2.py
|
aahung/aws-sam-cli-app-templates
|
fb44b0030d124e53ee4db42bc95240081e4dbbd8
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/unit_test/test_unit_test_java8_al2.py
|
aahung/aws-sam-cli-app-templates
|
fb44b0030d124e53ee4db42bc95240081e4dbbd8
|
[
"Apache-2.0"
] | null | null | null |
from unittest import skip
from tests.integration.base import Base
class UnitTest_java8_al2_cookiecutter_aws_sam_hello_java_gradle(Base.JavaUnitTestGradleBase):
    # Template under test, relative to the templates root.
    directory = "java8.al2/cookiecutter-aws-sam-hello-java-gradle"
    # Function sub-directories whose unit tests the base class runs.
    code_directories = ["HelloWorldFunction"]
class UnitTest_java8_al2_cookiecutter_aws_sam_hello_java_maven(Base.JavaUnitTestMavenBase):
    # Template under test, relative to the templates root.
    directory = "java8.al2/cookiecutter-aws-sam-hello-java-maven"
    # Function sub-directories whose unit tests the base class runs.
    code_directories = ["HelloWorldFunction"]
class UnitTest_java8_al2_cookiecutter_aws_sam_eventbridge_hello_java_gradle(Base.JavaUnitTestGradleBase):
    # Template under test, relative to the templates root.
    directory = "java8.al2/cookiecutter-aws-sam-eventbridge-hello-java-gradle"
    # Function sub-directories whose unit tests the base class runs.
    code_directories = ["HelloWorldFunction"]
class UnitTest_java8_al2_cookiecutter_aws_sam_eventbridge_hello_java_maven(Base.JavaUnitTestMavenBase):
    # Template under test, relative to the templates root.
    directory = "java8.al2/cookiecutter-aws-sam-eventbridge-hello-java-maven"
    # Function sub-directories whose unit tests the base class runs.
    code_directories = ["HelloWorldFunction"]
@skip("eventbridge schema app seems not be able to build")
class UnitTest_java8_al2_cookiecutter_aws_sam_eventbridge_schema_app_java_gradle(Base.JavaUnitTestGradleBase):
directory = "java8.al2/cookiecutter-aws-sam-eventbridge-schema-app-java-gradle"
code_directories = ["HelloWorldFunction"]
@skip("eventbridge schema app seems not be able to build")
class UnitTest_java8_al2_cookiecutter_aws_sam_eventbridge_schema_app_java_maven(Base.JavaUnitTestMavenBase):
directory = "java8.al2/cookiecutter-aws-sam-eventbridge-schema-app-java-maven"
code_directories = ["HelloWorldFunction"]
class UnitTest_java8_al2_cookiecutter_aws_sam_step_functions_sample_app_gradle(Base.JavaUnitTestGradleBase):
    # Template under test, relative to the templates root.
    directory = "java8.al2/cookiecutter-aws-sam-hello-java-step-functions-sample-app-gradle"
    # The step-functions sample has three Lambda functions, each tested separately.
    code_directories = [
        "functions/StockBuyer",
        "functions/StockChecker",
        "functions/StockSeller",
    ]
class UnitTest_java8_al2_cookiecutter_aws_sam_step_functions_sample_app_maven(Base.JavaUnitTestMavenBase):
    """Maven unit-test flow for the java8.al2 Step Functions sample app (three function dirs)."""

    code_directories = [
        "functions/StockBuyer",
        "functions/StockChecker",
        "functions/StockSeller",
    ]
    directory = "java8.al2/cookiecutter-aws-sam-hello-java-step-functions-sample-app-maven"
| 40.944444
| 110
| 0.804161
| 256
| 2,211
| 6.625
| 0.148438
| 0.075472
| 0.188679
| 0.216981
| 0.966981
| 0.960495
| 0.960495
| 0.960495
| 0.872052
| 0.840802
| 0
| 0.016194
| 0.106287
| 2,211
| 53
| 111
| 41.716981
| 0.842105
| 0
| 0
| 0.444444
| 0
| 0.055556
| 0.371777
| 0.260516
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.722222
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
6e1997e48d29f481d5701ca64c65a91a9ca3475e
| 11,703
|
py
|
Python
|
t/csi_test.py
|
doy/libvt100-python
|
1adf706513f097a545760b17badbbd132ed7d1b4
|
[
"MIT"
] | 1
|
2021-05-25T01:58:24.000Z
|
2021-05-25T01:58:24.000Z
|
t/csi_test.py
|
doy/libvt100-python
|
1adf706513f097a545760b17badbbd132ed7d1b4
|
[
"MIT"
] | null | null | null |
t/csi_test.py
|
doy/libvt100-python
|
1adf706513f097a545760b17badbbd132ed7d1b4
|
[
"MIT"
] | 1
|
2015-03-15T14:09:00.000Z
|
2015-03-15T14:09:00.000Z
|
from . import VT100Test
class CSITest(VT100Test):
    """Exercise CSI (Control Sequence Introducer) escape-sequence handling.

    Cursor positions are asserted as 0-based (row, col) tuples, while the CSI
    parameters in the escape sequences are 1-based, per the VT100 convention.
    NOTE(review): expected window contents are reproduced exactly as found;
    interior run-lengths of spaces may have been mangled upstream -- confirm
    against the emulator before relying on them.
    """

    def test_absolute_movement(self):
        """CUP/HVP/VPA/CHA absolute positioning, including clamping to 24x80."""
        assert self.vt.cursor_position() == (0, 0)
        self.process("\033[10;10H")
        assert self.vt.cursor_position() == (9, 9)
        self.process("\033[d")
        assert self.vt.cursor_position() == (0, 9)
        self.process("\033[15d")
        assert self.vt.cursor_position() == (14, 9)
        self.process("\033[H")
        assert self.vt.cursor_position() == (0, 0)
        self.process("\033[8H")
        assert self.vt.cursor_position() == (7, 0)
        self.process("\033[15G")
        assert self.vt.cursor_position() == (7, 14)
        self.process("\033[G")
        assert self.vt.cursor_position() == (7, 0)
        # parameter 0 and parameter 1 are both treated as "first row/column"
        self.process("\033[0;0H")
        assert self.vt.cursor_position() == (0, 0)
        self.process("\033[1;1H")
        assert self.vt.cursor_position() == (0, 0)
        # out-of-range coordinates clamp to the bottom-right cell
        self.process("\033[500;500H")
        assert self.vt.cursor_position() == (23, 79)

    def test_relative_movement(self):
        """CUU/CUD/CUF/CUB relative movement, with clamping at screen edges."""
        assert self.vt.cursor_position() == (0, 0)
        self.process("\033[C")
        assert self.vt.cursor_position() == (0, 1)
        self.process("\033[C")
        assert self.vt.cursor_position() == (0, 2)
        self.process("\033[20C")
        assert self.vt.cursor_position() == (0, 22)
        self.process("\033[D")
        assert self.vt.cursor_position() == (0, 21)
        self.process("\033[D")
        assert self.vt.cursor_position() == (0, 20)
        self.process("\033[9D")
        assert self.vt.cursor_position() == (0, 11)
        # oversized counts clamp at the margins instead of wrapping
        self.process("\033[500C")
        assert self.vt.cursor_position() == (0, 79)
        self.process("\033[500D")
        assert self.vt.cursor_position() == (0, 0)
        self.process("\033[B")
        assert self.vt.cursor_position() == (1, 0)
        self.process("\033[B")
        assert self.vt.cursor_position() == (2, 0)
        self.process("\033[20B")
        assert self.vt.cursor_position() == (22, 0)
        self.process("\033[A")
        assert self.vt.cursor_position() == (21, 0)
        self.process("\033[A")
        assert self.vt.cursor_position() == (20, 0)
        self.process("\033[9A")
        assert self.vt.cursor_position() == (11, 0)
        self.process("\033[500B")
        assert self.vt.cursor_position() == (23, 0)
        self.process("\033[500A")
        assert self.vt.cursor_position() == (0, 0)

    def test_ed(self):
        """ED (erase in display): modes 0/1/2, default mode, and the '?' variants."""
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("foo\033[5;5Hbar\033[10;10Hbaz\033[20;20Hquux")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' baz' + ("\n" * 10) + ' quux' + ("\n" * 5)
        self.process("\033[10;12H\033[0J")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' ba' + ("\n" * 15)
        self.process("\033[5;7H\033[1J")
        assert self.vt.window_contents() == ("\n" * 4) + ' r' + ("\n" * 5) + ' ba' + ("\n" * 15)
        self.process("\033[7;7H\033[2J")
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("\033[2J\033[H")
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("foo\033[5;5Hbar\033[10;10Hbaz\033[20;20Hquux")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' baz' + ("\n" * 10) + ' quux' + ("\n" * 5)
        # no parameter defaults to mode 0 (erase below)
        self.process("\033[10;12H\033[J")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' ba' + ("\n" * 15)
        self.process("\033[2J\033[H")
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("foo\033[5;5Hbar\033[10;10Hbaz\033[20;20Hquux")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' baz' + ("\n" * 10) + ' quux' + ("\n" * 5)
        # DECSED ('?' prefixed) variants behave like the plain versions here
        self.process("\033[10;12H\033[?0J")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' ba' + ("\n" * 15)
        self.process("\033[5;7H\033[?1J")
        assert self.vt.window_contents() == ("\n" * 4) + ' r' + ("\n" * 5) + ' ba' + ("\n" * 15)
        self.process("\033[7;7H\033[?2J")
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("\033[2J\033[H")
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("foo\033[5;5Hbar\033[10;10Hbaz\033[20;20Hquux")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' baz' + ("\n" * 10) + ' quux' + ("\n" * 5)
        self.process("\033[10;12H\033[?J")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' ba' + ("\n" * 15)

    def test_el(self):
        """EL (erase in line): modes 0/1/2, default mode, and the '?' variants."""
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("foo\033[5;5Hbarbar\033[10;10Hbazbaz\033[20;20Hquux")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' barbar' + ("\n" * 5) + ' bazbaz' + ("\n" * 10) + ' quux' + ("\n" * 5)
        self.process("\033[5;8H\033[0K")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' bazbaz' + ("\n" * 10) + ' quux' + ("\n" * 5)
        self.process("\033[10;13H\033[1K")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' baz' + ("\n" * 10) + ' quux' + ("\n" * 5)
        self.process("\033[20;22H\033[2K")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' baz' + ("\n" * 15)
        # no parameter defaults to mode 0 (erase to end of line)
        self.process("\033[1;2H\033[K")
        assert self.vt.window_contents() == 'f' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' baz' + ("\n" * 15)
        self.process("\033[2J\033[H")
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("foo\033[5;5Hbarbar\033[10;10Hbazbaz\033[20;20Hquux")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' barbar' + ("\n" * 5) + ' bazbaz' + ("\n" * 10) + ' quux' + ("\n" * 5)
        # DECSEL ('?' prefixed) variants behave like the plain versions here
        self.process("\033[5;8H\033[?0K")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' bazbaz' + ("\n" * 10) + ' quux' + ("\n" * 5)
        self.process("\033[10;13H\033[?1K")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' baz' + ("\n" * 10) + ' quux' + ("\n" * 5)
        self.process("\033[20;22H\033[?2K")
        assert self.vt.window_contents() == 'foo' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' baz' + ("\n" * 15)
        self.process("\033[1;2H\033[?K")
        assert self.vt.window_contents() == 'f' + ("\n" * 4) + ' bar' + ("\n" * 5) + ' baz' + ("\n" * 15)

    def test_ich_dch_ech(self):
        """ICH (insert char), DCH (delete char), ECH (erase char), incl. oversized counts."""
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("\033[10;10Hfoobar")
        assert self.vt.window_contents() == ("\n" * 9) + ' foobar' + ("\n" * 15)
        self.process("\033[10;12H\033[3@")
        assert self.vt.window_contents() == ("\n" * 9) + ' fo obar' + ("\n" * 15)
        assert self.vt.cursor_position() == (9, 11)
        self.process("\033[4P")
        assert self.vt.window_contents() == ("\n" * 9) + ' fobar' + ("\n" * 15)
        assert self.vt.cursor_position() == (9, 11)
        self.process("\033[100@")
        assert self.vt.window_contents() == ("\n" * 9) + ' fo' + ("\n" * 15)
        assert self.vt.cursor_position() == (9, 11)
        self.process("obar")
        assert self.vt.window_contents() == ("\n" * 9) + ' foobar' + ("\n" * 15)
        assert self.vt.cursor_position() == (9, 15)
        self.process("\033[10;12H\033[100P")
        assert self.vt.window_contents() == ("\n" * 9) + ' fo' + ("\n" * 15)
        assert self.vt.cursor_position() == (9, 11)
        self.process("obar")
        assert self.vt.window_contents() == ("\n" * 9) + ' foobar' + ("\n" * 15)
        assert self.vt.cursor_position() == (9, 15)
        self.process("\033[10;13H\033[X")
        assert self.vt.window_contents() == ("\n" * 9) + ' foo ar' + ("\n" * 15)
        assert self.vt.cursor_position() == (9, 12)
        self.process("\033[10;11H\033[4X")
        assert self.vt.window_contents() == ("\n" * 9) + ' f r' + ("\n" * 15)
        assert self.vt.cursor_position() == (9, 10)
        self.process("\033[10;11H\033[400X")
        assert self.vt.window_contents() == ("\n" * 9) + ' f' + ("\n" * 15)
        assert self.vt.cursor_position() == (9, 10)

    def test_il_dl(self):
        """IL (insert line) and DL (delete line); cursor position is unaffected."""
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("\033[10;10Hfoobar\033[3D")
        assert self.vt.window_contents() == ("\n" * 9) + ' foobar' + ("\n" * 15)
        assert self.vt.cursor_position() == (9, 12)
        self.process("\033[L")
        assert self.vt.window_contents() == ("\n" * 10) + ' foobar' + ("\n" * 14)
        assert self.vt.cursor_position() == (9, 12)
        self.process("\033[3L")
        assert self.vt.window_contents() == ("\n" * 13) + ' foobar' + ("\n" * 11)
        assert self.vt.cursor_position() == (9, 12)
        self.process("\033[500L")
        assert self.vt.window_contents() == ("\n" * 24)
        assert self.vt.cursor_position() == (9, 12)
        self.process("\033[10;10Hfoobar\033[3D\033[6A")
        assert self.vt.window_contents() == ("\n" * 9) + ' foobar' + ("\n" * 15)
        assert self.vt.cursor_position() == (3, 12)
        self.process("\033[M")
        assert self.vt.window_contents() == ("\n" * 8) + ' foobar' + ("\n" * 16)
        assert self.vt.cursor_position() == (3, 12)
        self.process("\033[3M")
        assert self.vt.window_contents() == ("\n" * 5) + ' foobar' + ("\n" * 19)
        assert self.vt.cursor_position() == (3, 12)
        self.process("\033[500M")
        assert self.vt.window_contents() == ("\n" * 24)
        assert self.vt.cursor_position() == (3, 12)

    def test_scroll(self):
        """SU (scroll up) and SD (scroll down); cursor position is unaffected."""
        assert self.vt.window_contents() == ("\n" * 24)
        self.process("1\r\n2\r\n3\r\n4\r\n5\r\n6\r\n7\r\n8\r\n9\r\n10\r\n11\r\n12\r\n13\r\n14\r\n15\r\n16\r\n17\r\n18\r\n19\r\n20\r\n21\r\n22\r\n23\r\n24")
        assert self.vt.window_contents() == "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n"
        self.process("\033[15;15H")
        assert self.vt.cursor_position() == (14, 14)
        # dropped a leftover debug print of the window contents here
        self.vt.process("\033[S")
        assert self.vt.window_contents() == "2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n\n"
        assert self.vt.cursor_position() == (14, 14)
        self.vt.process("\033[3S")
        assert self.vt.window_contents() == "5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n\n\n\n\n"
        assert self.vt.cursor_position() == (14, 14)
        self.vt.process("\033[T")
        assert self.vt.window_contents() == "\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n\n\n\n"
        assert self.vt.cursor_position() == (14, 14)
        self.vt.process("\033[5T")
        assert self.vt.window_contents() == "\n\n\n\n\n\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n"
        assert self.vt.cursor_position() == (14, 14)
| 42.711679
| 165
| 0.492096
| 1,591
| 11,703
| 3.546197
| 0.094909
| 0.115916
| 0.221198
| 0.194966
| 0.892769
| 0.863346
| 0.794576
| 0.770826
| 0.749734
| 0.738391
| 0
| 0.12948
| 0.280014
| 11,703
| 273
| 166
| 42.868132
| 0.540114
| 0
| 0
| 0.484211
| 0
| 0.031579
| 0.237033
| 0.075194
| 0
| 0
| 0
| 0
| 0.547368
| 1
| 0.036842
| false
| 0
| 0.005263
| 0
| 0.047368
| 0.005263
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6e1e6f0a1431e2c218f998776bdd7a69f70d29f8
| 1,869
|
py
|
Python
|
project/editorial/migrations/0055_auto_20171120_2321.py
|
cojennin/facet
|
230e65316134b3399a35d40034728e61ba63cb2a
|
[
"MIT"
] | 25
|
2015-07-13T22:16:36.000Z
|
2021-11-11T02:45:32.000Z
|
project/editorial/migrations/0055_auto_20171120_2321.py
|
cojennin/facet
|
230e65316134b3399a35d40034728e61ba63cb2a
|
[
"MIT"
] | 74
|
2015-12-01T18:57:47.000Z
|
2022-03-11T23:25:47.000Z
|
project/editorial/migrations/0055_auto_20171120_2321.py
|
cojennin/facet
|
230e65316134b3399a35d40034728e61ba63cb2a
|
[
"MIT"
] | 6
|
2016-01-08T21:12:43.000Z
|
2019-05-20T16:07:56.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax the ``organization`` foreign key on every asset model.

    All eight asset models receive the identical nullable/blank FK, so the
    AlterField operations are generated from a list of model names instead of
    being written out eight times.
    """

    dependencies = [
        ('editorial', '0054_auto_20171119_2334'),
    ]

    operations = [
        migrations.AlterField(
            model_name=asset_model,
            name='organization',
            field=models.ForeignKey(blank=True, to='editorial.Organization', null=True),
        )
        for asset_model in (
            'audioasset',
            'documentasset',
            'imageasset',
            'simpleaudio',
            'simpledocument',
            'simpleimage',
            'simplevideo',
            'videoasset',
        )
    ]
| 33.981818
| 88
| 0.595506
| 161
| 1,869
| 6.813665
| 0.242236
| 0.145852
| 0.182315
| 0.211486
| 0.746582
| 0.746582
| 0.746582
| 0.746582
| 0.746582
| 0.746582
| 0
| 0.012668
| 0.281969
| 1,869
| 54
| 89
| 34.611111
| 0.804769
| 0.011236
| 0
| 0.666667
| 0
| 0
| 0.213434
| 0.107801
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6e468d1411246c5662165782687d36f32990a605
| 6,534
|
py
|
Python
|
tests/test_adapter.py
|
optimetry/optimetry
|
aa539f9fcf1cb6eb16ee3491eda7442f30a8248d
|
[
"MIT"
] | 2
|
2020-11-22T21:51:10.000Z
|
2021-06-06T21:27:30.000Z
|
tests/test_adapter.py
|
optimetry/optimetry
|
aa539f9fcf1cb6eb16ee3491eda7442f30a8248d
|
[
"MIT"
] | null | null | null |
tests/test_adapter.py
|
optimetry/optimetry
|
aa539f9fcf1cb6eb16ee3491eda7442f30a8248d
|
[
"MIT"
] | null | null | null |
"""Unit tests for generalized diagonal moment optimizer."""
import copy
import unittest
import torch
import optimetry.zoo
def tiny_net():
    """Small deterministic neural net regression problem for tests.

    Seeds torch's RNG so every call returns an identically-initialized
    2->100->100->1 ReLU MLP plus a fixed 20-sample random regression set.
    """
    torch.manual_seed(0)
    layers = (
        torch.nn.Linear(2, 100),
        torch.nn.ReLU(),
        torch.nn.Linear(100, 100),
        torch.nn.ReLU(),
        torch.nn.Linear(100, 1),
    )
    net = torch.nn.Sequential(*layers)
    X = torch.randn(20, 2)
    y = torch.randn(20, 1)
    return net, X, y
class TestAdapter(unittest.TestCase):
    """Adapter unit tests.

    Each test builds the same deterministic network twice (via ``tiny_net``),
    trains one copy with a reference ``torch.optim`` optimizer and the other
    with an equivalently configured ``optimetry.zoo.Adapter``, and checks the
    parameter trajectories coincide.
    """

    def _assert_trajectories_match(self, net, net_copy, X, y,
                                   optimizer, optimizer_copy, steps=10):
        """Step both optimizers ``steps`` times on the same MSE loss and
        assert the two parameter sets stay element-wise close.

        NOTE(review): the closeness check runs after every step, matching the
        'trajectories match' intent -- confirm against the pre-refactor
        indentation if exact historical parity matters.
        """
        for _ in range(steps):
            optimizer.zero_grad()
            loss = (net(X) - y).square().mean()
            loss.backward()
            optimizer.step()
            optimizer_copy.zero_grad()
            loss_copy = (net_copy(X) - y).square().mean()
            loss_copy.backward()
            optimizer_copy.step()
            # check that trajectories match
            for p, p_copy in zip(net.parameters(), net_copy.parameters()):
                self.assertTrue(torch.allclose(p, p_copy, atol=1e-6))

    def test_adam(self):
        """Test that Adam matches."""
        net, X, y = tiny_net()
        net_copy = copy.deepcopy(net)
        lr = 1e-2
        weight_decay = 1e-3
        betas = (0.8, 0.9)
        optimizer = torch.optim.Adam(net.parameters(),
                                     lr=lr, betas=betas, weight_decay=weight_decay)
        optimizer_copy = optimetry.zoo.Adapter(net_copy.parameters(), lr=lr, betas=betas,
                                               weight_decay=weight_decay,
                                               adam_bias_correction=True)
        self._assert_trajectories_match(net, net_copy, X, y, optimizer, optimizer_copy)

    def test_adamw(self):
        """Test that AdamW matches."""
        net, X, y = tiny_net()
        net_copy = copy.deepcopy(net)
        lr = 1e-2
        weight_decay = 1e-3
        optimizer = torch.optim.AdamW(net.parameters(), lr=lr, weight_decay=weight_decay)
        optimizer_copy = optimetry.zoo.Adapter(net_copy.parameters(), lr=lr,
                                               weight_decay=weight_decay,
                                               adam_bias_correction=True,
                                               decouple_weight_decay=True)
        self._assert_trajectories_match(net, net_copy, X, y, optimizer, optimizer_copy)

    def test_adagrad(self):
        """Test that AdaGrad matches."""
        net, X, y = tiny_net()
        net_copy = copy.deepcopy(net)
        lr = 1e-2
        weight_decay = 1e-3
        optimizer = torch.optim.Adagrad(net.parameters(), lr=lr, weight_decay=weight_decay)
        # betas=(0, 1), gammas=(1, 1) turns the Adapter's moment updates into
        # AdaGrad's running sum of squared gradients
        optimizer_copy = optimetry.zoo.Adapter(net_copy.parameters(), lr=lr,
                                               betas=(0, 1), gammas=(1, 1),
                                               eps=1e-10, weight_decay=weight_decay)
        self._assert_trajectories_match(net, net_copy, X, y, optimizer, optimizer_copy)

    def test_rmsprop_with_momentum(self):
        """Test that RMSProp matches with nonzero momentum."""
        net, X, y = tiny_net()
        net_copy = copy.deepcopy(net)
        lr = 1e-2
        weight_decay = 1e-3
        alpha = 0.9
        momentum = 0.7
        optimizer = torch.optim.RMSprop(net.parameters(), lr=lr, weight_decay=weight_decay,
                                        alpha=alpha, momentum=momentum)
        optimizer_copy = optimetry.zoo.Adapter(net_copy.parameters(), lr=lr,
                                               betas=(momentum, alpha),
                                               gammas=(1, 1 - alpha), eps=1e-8,
                                               weight_decay=weight_decay,
                                               condition_before_momentum=True)
        self._assert_trajectories_match(net, net_copy, X, y, optimizer, optimizer_copy)

    def test_rmsprop_without_momentum(self):
        """Test that RMSProp matches with zero momentum."""
        net, X, y = tiny_net()
        net_copy = copy.deepcopy(net)
        lr = 1e-2
        weight_decay = 1e-3
        alpha = 0.9
        momentum = 0
        optimizer = torch.optim.RMSprop(net.parameters(), lr=lr, weight_decay=weight_decay,
                                        alpha=alpha, momentum=momentum)
        optimizer_copy = optimetry.zoo.Adapter(net_copy.parameters(), lr=lr,
                                               betas=(momentum, alpha),
                                               gammas=(1, 1 - alpha), eps=1e-8,
                                               weight_decay=weight_decay,
                                               condition_before_momentum=True)
        self._assert_trajectories_match(net, net_copy, X, y, optimizer, optimizer_copy)
# Allow running this test module directly (e.g. `python test_adapter.py`).
if __name__ == '__main__':
    unittest.main()
| 36.099448
| 100
| 0.5176
| 729
| 6,534
| 4.462277
| 0.130316
| 0.087919
| 0.016907
| 0.06763
| 0.841685
| 0.841685
| 0.841685
| 0.818322
| 0.780818
| 0.76514
| 0
| 0.022098
| 0.369758
| 6,534
| 180
| 101
| 36.3
| 0.767848
| 0.069789
| 0
| 0.751938
| 0
| 0
| 0.001326
| 0
| 0
| 0
| 0
| 0
| 0.03876
| 1
| 0.046512
| false
| 0
| 0.031008
| 0
| 0.093023
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
28494fd76d59c54f983e8df78c44368be8394fa6
| 74
|
py
|
Python
|
IPython/external/mglob/__init__.py
|
dchichkov/ipython
|
8096bb8640ee7e7c5ebdf3f428fe69cd390e1cd4
|
[
"BSD-3-Clause-Clear"
] | 2
|
2015-04-21T12:12:43.000Z
|
2015-04-21T12:12:54.000Z
|
IPython/external/mglob/__init__.py
|
dchichkov/ipython
|
8096bb8640ee7e7c5ebdf3f428fe69cd390e1cd4
|
[
"BSD-3-Clause-Clear"
] | 3
|
2015-04-01T13:14:57.000Z
|
2015-05-26T16:01:37.000Z
|
IPython/external/mglob/__init__.py
|
dchichkov/ipython
|
8096bb8640ee7e7c5ebdf3f428fe69cd390e1cd4
|
[
"BSD-3-Clause-Clear"
] | 1
|
2021-10-06T07:59:25.000Z
|
2021-10-06T07:59:25.000Z
|
# Shim: re-export mglob's public names, preferring an externally installed
# `mglob` package and falling back to `_mglob` (presumably the copy bundled
# alongside this package -- confirm) when the external one is not importable.
try:
    from mglob import *
except ImportError:
    from _mglob import *
| 14.8
| 24
| 0.689189
| 9
| 74
| 5.555556
| 0.666667
| 0.36
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.256757
| 74
| 4
| 25
| 18.5
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
285a317f826f0d54f1efcd6014b5cd3f313399ec
| 12,895
|
py
|
Python
|
Software/Python/motion/gcode_gen.py
|
pd3d/magneto
|
da619b58b3e3c0ba9d6ac149e8902d8fc4614ccb
|
[
"MIT"
] | 1
|
2021-05-18T16:50:11.000Z
|
2021-05-18T16:50:11.000Z
|
Software/Python/motion/gcode_gen.py
|
dash-orlando/magneto
|
da619b58b3e3c0ba9d6ac149e8902d8fc4614ccb
|
[
"MIT"
] | null | null | null |
Software/Python/motion/gcode_gen.py
|
dash-orlando/magneto
|
da619b58b3e3c0ba9d6ac149e8902d8fc4614ccb
|
[
"MIT"
] | null | null | null |
"""
Motion Test Script
Script designed to generate GCODE for a given 3D path
Fluvio L Lobo Fenoglietto
"""
import numpy as np
def gcode_gen(out_name, x, y, z, speed):
    """Generate a GCODE file for a 3D path.

    Given a 3D path described by its coordinates in ``x``, ``y`` and ``z``
    (numpy arrays of equal length), writes ``<out_name>.gcode`` containing a
    homing/offset preamble followed by one ``G1`` move per point, and echoes
    every emitted command to the terminal for debugging.

    10/05/2018 - Added the speed input variable (same for all axes movement)

    Parameters:
        out_name : output file name, without the ``.gcode`` extension
        x, y, z  : path coordinates (must support ``+ scalar`` and
                   ``.round(decimals=...)``, i.e. numpy arrays)
        speed    : feed rate for every move (GCODE ``F`` parameter)

    Returns None; side effects are the written file and terminal output.
    """
    # Data-specific Variables and Edits ======================================= #
    array_len = len(x)                          # len(y) or len(z)
    printer_offset = [150.00, 150.00, 75.00]    # offset to center of printer
    # shift the path to the printer center, truncate floats to 2 decimals
    x = (x + printer_offset[0]).round(decimals=2)
    y = (y + printer_offset[1]).round(decimals=2)
    z = (z + printer_offset[2]).round(decimals=2)

    file_name = out_name + ".gcode"
    # `with` guarantees the handle is closed even if writing raises; the
    # original used bare open()/close() and shadowed the `file` builtin.
    with open(file_name, "w") as gcode_file:
        if array_len > 0:
            # preamble (written once; the original duplicated the per-point
            # move emission across its i == 0 and i > 0 branches)
            print('; G-Code generated by gcode_gen version 1.0')
            print('; Home axes...')
            print('G28 ;')                      # gcode command for homing all axes
            print('; Initializing...')
            print('; Applying Z Offset...')
            print('G91 ; Relative position...')  # set relative position
            print('G1 Z50 ; Add Z Offset to avoid electronics...')  # apply Z offset to avoid electronics
            print('; Starting Path...\n')
            print('G90 ; Absolute position...\n')  # set absolute position
            gcode_file.write('; G-Code generated by gcode_gen version 1.0\n')
            gcode_file.write('; Home axes...\n')
            gcode_file.write('G28 ;\n')
            gcode_file.write('; Initializing...\n')
            gcode_file.write('; Applying Z Offset...\n')
            gcode_file.write('G91 ; Relative position...\n')
            gcode_file.write('G1 Z50 ; Add Z Offset to avoid electronics...\n')
            gcode_file.write('; Starting Path...\n')
            gcode_file.write('G90 ; Absolute position...\n')
        for i in range(array_len):
            # one absolute G1 move per path point (echoed, then written)
            print('G1 X{} Y{} Z{} F{}'.format(x[i], y[i], z[i], speed))
            gcode_file.write('G1 X{} Y{} Z{} F{} \n'.format(x[i], y[i], z[i], speed))
def gcode_gen_2(out_name, x, y, z, printer_offset, speed):
    """Generate a GCODE file for a 3D path with a caller-supplied offset.

    Newest version of the gcode generator function.
    Features:
    10/12/2018 - Converted printer offsets as an input to the function

    Parameters:
        out_name       : output file name, without the ``.gcode`` extension
        x, y, z        : path coordinates (numpy arrays)
        printer_offset : 3-element offset [x, y, z] to the printer center
        speed          : feed rate for every move (GCODE ``F`` parameter)

    Returns None; side effects are the written file and terminal output.
    """
    # Data-specific Variables and Edits ======================================= #
    array_len = len(x)                          # len(y) or len(z)
    # shift the path by the caller's offset, truncate floats to 2 decimals
    x = (x + printer_offset[0]).round(decimals=2)
    y = (y + printer_offset[1]).round(decimals=2)
    z = (z + printer_offset[2]).round(decimals=2)

    file_name = out_name + ".gcode"
    # `with` guarantees the handle is closed even if writing raises; the
    # original used bare open()/close() and shadowed the `file` builtin.
    with open(file_name, "w") as gcode_file:
        if array_len > 0:
            # preamble (written once; the original duplicated the per-point
            # move emission across its i == 0 and i > 0 branches)
            print('; G-Code generated by gcode_gen version 1.0')
            print('; Home axes...')
            print('G28 ;')                      # gcode command for homing all axes
            print('; Initializing...')
            print('; Applying Z Offset...')
            print('G91 ; Relative position...')  # set relative position
            print('G1 Z50 ; Add Z Offset to avoid electronics...')  # apply Z offset to avoid electronics
            print('; Starting Path...\n')
            print('G90 ; Absolute position...\n')  # set absolute position
            gcode_file.write('; G-Code generated by gcode_gen version 1.0\n')
            gcode_file.write('; Home axes...\n')
            gcode_file.write('G28 ;\n')
            gcode_file.write('; Initializing...\n')
            gcode_file.write('; Applying Z Offset...\n')
            gcode_file.write('G91 ; Relative position...\n')
            gcode_file.write('G1 Z50 ; Add Z Offset to avoid electronics...\n')
            gcode_file.write('; Starting Path...\n')
            gcode_file.write('G90 ; Absolute position...\n')
        for i in range(array_len):
            # one absolute G1 move per path point (echoed, then written)
            print('G1 X{} Y{} Z{} F{}'.format(x[i], y[i], z[i], speed))
            gcode_file.write('G1 X{} Y{} Z{} F{} \n'.format(x[i], y[i], z[i], speed))
def gcode_gen_cwalk(out_name, x, y, z, printer, speed, interval, mode):
    """Generate a GCODE file for a center-based walk (e.g. a random walk).

    Homes, moves absolutely (G90) to the printer center given by ``printer``,
    then switches to relative positioning (G91) and emits one relative ``G1``
    move per point with a pause between moves.

    interval --> time intervals (in milliseconds) between motion
    mode     --> mode of operation;
                 mode = 0 --> normal mode (G4 dwell between moves)
                 mode = 1 --> steppers are disabled (M18) and re-enabled (M17)
                              around each dwell
    Features:
    10/12/2018 - Converted printer offsets as an input to the function

    Returns None; side effects are the written file and terminal output.
    """
    # Data-specific Variables and Edits ======================================= #
    array_len = len(x)                      # len(y) or len(z)
    # printer center, truncated to 2 decimals
    x0 = printer[0].round(decimals=2)
    y0 = printer[1].round(decimals=2)
    z0 = printer[2].round(decimals=2)
    # truncate the (relative) path coordinates to 2 decimals
    x = x.round(decimals=2)
    y = y.round(decimals=2)
    z = z.round(decimals=2)

    file_name = out_name + ".gcode"
    # `with` guarantees the handle is closed even if writing raises; the
    # original used bare open()/close() and shadowed the `file` builtin.
    with open(file_name, "w") as gcode_file:

        def emit_pause():
            # dwell between moves; in mode 1 the steppers are released first
            if mode == 0:
                print('G4 P{}'.format(interval))                # added interval or wait
                gcode_file.write('G4 P{} \n'.format(interval))
            elif mode == 1:
                print('M18')                                    # disable motors
                print('G4 P{}'.format(interval))                # added interval or wait
                print('M17')                                    # enable motors
                gcode_file.write('M18 \n')
                gcode_file.write('G4 P{} \n'.format(interval))
                gcode_file.write('M17 \n')

        if array_len > 0:
            # preamble: home, absolute move to center, then relative mode
            print('; G-Code generated by gcode_gen version 1.0')
            print('; Home axes...')
            print('G28 ;')                      # gcode command for homing all axes
            print('; Initializing...')
            print('; Moving to center...')
            # fixed: the original echoed 'G90 ; Relative position...' here,
            # contradicting the file output (G90 is absolute positioning)
            print('G90 ; Absolute position...')
            print('G1 X{} Y{} Z{} F{}'.format(x0, y0, z0, speed))  # translation to the center of the printer
            print('; Starting Path...\n')
            print('G91 ; Relative position...\n')  # switch to relative moves
            gcode_file.write('; G-Code generated by gcode_gen version 1.0\n')
            gcode_file.write('; Home axes...\n')
            gcode_file.write('G28 ;\n')
            gcode_file.write('; Initializing...\n')
            gcode_file.write('; Moving to center...\n')
            gcode_file.write('G90 ; Absolute position...\n')
            gcode_file.write('G1 X{} Y{} Z{} F{} \n'.format(x0, y0, z0, speed))
            gcode_file.write('; Starting Path...\n')
            gcode_file.write('G91 ; Relative position...\n')
        for i in range(array_len):
            # one relative G1 move per path point, followed by the pause
            print('G1 X{} Y{} Z{} F{}'.format(x[i], y[i], z[i], speed))
            gcode_file.write('G1 X{} Y{} Z{} F{} \n'.format(x[i], y[i], z[i], speed))
            emit_pause()
"""
References:
- https://www.simplify3d.com/support/articles/3d-printing-gcode-tutorial/
"""
| 50.767717
| 122
| 0.381698
| 1,249
| 12,895
| 3.905524
| 0.124099
| 0.075646
| 0.010455
| 0.01435
| 0.848503
| 0.833743
| 0.826773
| 0.814063
| 0.805863
| 0.802583
| 0
| 0.025198
| 0.393719
| 12,895
| 253
| 123
| 50.968379
| 0.598746
| 0.329352
| 0
| 0.89781
| 1
| 0
| 0.199328
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021898
| false
| 0
| 0.007299
| 0
| 0.029197
| 0.386861
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
28656fce87eaa5d0c0a0157fcaec17f28b80ace9
| 2,238
|
py
|
Python
|
fichas/migrations/0007_auto_20160925_1625.py
|
reciproco/delma
|
ec20f8b1a7673279643f990a75f73bbdc806927a
|
[
"MIT"
] | null | null | null |
fichas/migrations/0007_auto_20160925_1625.py
|
reciproco/delma
|
ec20f8b1a7673279643f990a75f73bbdc806927a
|
[
"MIT"
] | null | null | null |
fichas/migrations/0007_auto_20160925_1625.py
|
reciproco/delma
|
ec20f8b1a7673279643f990a75f73bbdc806927a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-09-25 16:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add ten free-text assessment fields to the ``ficha`` model.

    Every new column is a ``CharField`` with ``default=0`` used only to
    back-fill existing rows (``preserve_default=False`` drops the default
    from the final field definition).
    """

    dependencies = [
        ('fichas', '0006_auto_20160628_2137'),
    ]

    # All ten additions share the same shape, so generate them from a
    # (field name, max_length) table instead of spelling each one out.
    operations = [
        migrations.AddField(
            model_name='ficha',
            name=field_name,
            field=models.CharField(default=0, max_length=length),
            preserve_default=False,
        )
        for field_name, length in (
            ('dr', 24),
            ('emg', 128),
            ('ext', 12),
            ('flex', 12),
            ('mov', 12),
            ('pc', 24),
            ('prosup', 12),
            ('rot', 12),
            ('rte', 24),
            ('tm', 12),
        )
    ]
| 29.447368
| 62
| 0.540661
| 217
| 2,238
| 5.400922
| 0.258065
| 0.153584
| 0.196246
| 0.230375
| 0.808874
| 0.808874
| 0.778157
| 0.746587
| 0.746587
| 0.698805
| 0
| 0.043003
| 0.345398
| 2,238
| 75
| 63
| 29.84
| 0.756997
| 0.029937
| 0
| 0.720588
| 1
| 0
| 0.050738
| 0.010609
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.073529
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
289f0bad4249819672d1996e0412f8a5d41a0042
| 6,680
|
py
|
Python
|
blog/forms/blog/showcase_forms.py
|
msidargo/msidargo_blog
|
7b412e30532ddab23e81e1a0f5e9a44cd2e7f108
|
[
"MIT"
] | null | null | null |
blog/forms/blog/showcase_forms.py
|
msidargo/msidargo_blog
|
7b412e30532ddab23e81e1a0f5e9a44cd2e7f108
|
[
"MIT"
] | null | null | null |
blog/forms/blog/showcase_forms.py
|
msidargo/msidargo_blog
|
7b412e30532ddab23e81e1a0f5e9a44cd2e7f108
|
[
"MIT"
] | null | null | null |
# Django imports
from django import forms
from django.forms import TextInput, Select, FileInput
# Third-party app imports
from ckeditor.widgets import CKEditorWidget
# Blog app imports
from blog.models.showcase_models import Showcase
from blog.models.category_models import Category
class ShowcaseCreateForm(forms.ModelForm):
    """ModelForm for creating a ``Showcase``.

    Restricts the category choices to approved categories and attaches
    Bootstrap/selectpicker-flavoured widgets to every field.
    """

    # Only approved categories may be selected when creating a showcase.
    category = forms.ModelChoiceField(queryset=Category.objects.filter(
        approved=True),
        empty_label="Select Category",
        widget=forms.Select(attrs=
        {
            "class": "form-control selectpicker",
            "type": "text",
            "name": "showcase-category",
            "id": "showcaseCategory",
            "data-live-search": "true"
        }
        )
    )

    class Meta:
        # Showcase status constants
        DRAFTED = "DRAFTED"
        PUBLISHED = "PUBLISHED"
        # CHOICES
        STATUS_CHOICES = (
            (DRAFTED, 'Draft'),
            (PUBLISHED, 'Publish'),
        )

        model = Showcase
        fields = ["title", "category", "image", "image_credit", "body", "tags", "status"]
        widgets = {
            'title': TextInput(attrs={
                'name': "showcase-title",
                'class': "form-control",
                'placeholder': "Enter Showcase Title",
                'id': "showcaseTitle"
            }),
            'image': FileInput(attrs={
                "class": "form-control clearablefileinput",
                "type": "file",
                "id": "showcaseImage",
                "name": "showcase-image"
            }
            ),
            'image_credit': TextInput(attrs={
                'name': "image_credit",
                'class': "form-control",
                'placeholder': "Example: made4dev.com (Premium Programming T-shirts)",
                'id': "image_credit"
            }),
            # BUG FIX: Meta.widgets maps field names to *widget* instances or
            # classes.  The original wrapped CKEditorWidget in a
            # forms.CharField, so Django received a form field where a widget
            # is required, which breaks rendering of the 'body' field.
            'body': CKEditorWidget(config_name="default", attrs={
                "rows": 5, "cols": 20,
                'id': 'content',
                'name': "showcase_content",
                'class': "form-control",
            }),
            'tags': TextInput(attrs={
                'name': "tags",
                'class': "form-control",
                'placeholder': "Example: sports, game, politics",
                'id': "tags",
                'data-role': "tagsinput"
            }),
            'status': Select(choices=STATUS_CHOICES,
                             attrs=
                             {
                                 "class": "form-control selectpicker",
                                 "name": "status", "type": "text",
                                 "id": "showcaseStatus",
                                 "data-live-search": "true",
                                 "title": "Select Status"
                             }
                             ),
        }
class ShowcaseUpdateForm(forms.ModelForm):
    """ModelForm for updating an existing ``Showcase``.

    Mirrors ``ShowcaseCreateForm``: approved-only category choices and the
    same Bootstrap/selectpicker widget set.
    """

    # Only approved categories may be selected when updating a showcase.
    category = forms.ModelChoiceField(queryset=Category.objects.filter(
        approved=True),
        empty_label="Select Category",
        widget=forms.Select(attrs=
        {
            "class": "form-control selectpicker",
            "type": "text",
            "name": "showcase-category",
            "id": "showcaseCategory",
            "data-live-search": "true"
        }
        )
    )

    class Meta:
        # Showcase status constants
        DRAFTED = "DRAFTED"
        PUBLISHED = "PUBLISHED"
        # CHOICES
        STATUS_CHOICES = (
            (DRAFTED, 'Draft'),
            (PUBLISHED, 'Publish'),
        )

        model = Showcase
        fields = ["title", "category", "image", "image_credit", "body", "tags", "status"]
        widgets = {
            'title': TextInput(attrs={
                'name': "showcase-title",
                'class': "form-control",
                'placeholder': "Enter Showcase Title",
                'id': "showcaseTitle"
            }),
            'image_credit': TextInput(attrs={
                'name': "image_credit",
                'class': "form-control",
                'placeholder': "Example: made4dev.com (Premium Programming T-shirts)",
                'id': "image_credit"
            }),
            'status': Select(choices=STATUS_CHOICES,
                             attrs=
                             {
                                 "class": "form-control selectpicker",
                                 "name": "status", "type": "text",
                                 "id": "showcaseStatus",
                                 "data-live-search": "true",
                                 "title": "Select Status"
                             }
                             ),
            # BUG FIX: Meta.widgets maps field names to *widget* instances or
            # classes.  The original wrapped CKEditorWidget in a
            # forms.CharField, so Django received a form field where a widget
            # is required, which breaks rendering of the 'body' field.
            'body': CKEditorWidget(config_name="default", attrs={
                "rows": 5, "cols": 20,
                'id': 'content',
                'name': "showcase_content",
                'class': "form-control",
            }),
            'image': FileInput(attrs={
                "class": "form-control clearablefileinput",
                "type": "file",
                "id": "showcaseImage",
                "name": "showcase-image",
            }
            ),
        }
| 40.731707
| 99
| 0.348952
| 373
| 6,680
| 6.19571
| 0.227882
| 0.050627
| 0.090004
| 0.054522
| 0.848983
| 0.834271
| 0.834271
| 0.834271
| 0.834271
| 0.834271
| 0
| 0.002662
| 0.55015
| 6,680
| 163
| 100
| 40.981595
| 0.766389
| 0.018413
| 0
| 0.757576
| 0
| 0
| 0.212093
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037879
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
955d547268663a8b93945fdcd3f43c4f827c2c16
| 4,171
|
py
|
Python
|
scripts/test_scripts.py
|
hurwitzlab/ohana-blast
|
fb2e46c6ee3594f10576ae74978d96a68423b0df
|
[
"MIT"
] | null | null | null |
scripts/test_scripts.py
|
hurwitzlab/ohana-blast
|
fb2e46c6ee3594f10576ae74978d96a68423b0df
|
[
"MIT"
] | null | null | null |
scripts/test_scripts.py
|
hurwitzlab/ohana-blast
|
fb2e46c6ee3594f10576ae74978d96a68423b0df
|
[
"MIT"
] | null | null | null |
import io
import extractseqs
# Tabular BLAST output fixture: 7 rows of query-id, subject-id, and the
# standard outfmt-6 numeric columns, separated by literal "\t" escapes.
# NOTE(review): rows 1 and 5 have a space instead of "\t" between the
# "546" and "2e-70" columns — looks intentional (whitespace-tolerance
# test data), but confirm against the parser before "fixing".
blast_output_text = """\
A\tHOT234_1_0200m_rep_c55158_2\t100.00\t147\t0\t0\t1\t147\t400\t546 2e-70\t272
A\tHOT234_1_0200m_c10096_4\t100.00\t147\t0\t0\t1\t147\t400\t546\t2e-70\t272
A\tHOT238_1c_0200m_c3_1\t100.00\t147\t0\t0\t1\t147\t1\t147\t2e-70\t272
A\tHOT238_1c_0200m_rep_c260499_1\t100.00\t147\t0\t0\t1\t147\t400\t546\t2e-70\t272
B\tHOT234_1_0200m_rep_c55158_2\t100.00\t147\t0\t0\t1\t147\t400\t546 2e-70\t272
B\tHOT234_1_0200m_c10096_4\t100.00\t147\t0\t0\t1\t147\t400\t546\t2e-70\t272
C\tHOT238_1c_0200m_c3_1\t100.00\t147\t0\t0\t1\t147\t1\t147\t2e-70\t272"""

# Minimal three-record FASTA fixture (ids A, B, C) used by the
# find_sequences tests below.
fasta_text = """\
>A
GTGC
>B
ATGC
>C
ATGG"""
def test_get_blast_hits():
    """Without a row limit, all 7 fixture rows are grouped and counted."""
    blast_output_file = io.StringIO(blast_output_text)
    blast_hits, seqid_counts = extractseqs.get_blast_reference_hits(blast_output_file=blast_output_file, blast_output_row_limit=None)

    expected_hits = {
        'HOT234_1_0200m': ['HOT234_1_0200m_rep_c55158_2',
                           'HOT234_1_0200m_c10096_4'],
        'HOT238_1c_0200m': ['HOT238_1c_0200m_c3_1',
                            'HOT238_1c_0200m_rep_c260499_1'],
    }
    assert len(blast_hits) == len(expected_hits)
    for reference, seq_ids in expected_hits.items():
        assert len(blast_hits[reference]) == len(seq_ids)
        for seq_id in seq_ids:
            assert seq_id in blast_hits[reference]

    expected_counts = {
        'HOT234_1_0200m_rep_c55158_2': 2,
        'HOT234_1_0200m_c10096_4': 2,
        'HOT238_1c_0200m_c3_1': 2,
        'HOT238_1c_0200m_rep_c260499_1': 1,
    }
    assert len(seqid_counts) == len(expected_counts)
    for seq_id, count in expected_counts.items():
        assert seqid_counts[seq_id] == count
def test_get_blast_hits__limit():
    """With blast_output_row_limit=3, only the first three rows are kept."""
    blast_output_file = io.StringIO(blast_output_text)
    blast_hits, seqid_counts = extractseqs.get_blast_reference_hits(blast_output_file=blast_output_file, blast_output_row_limit=3)

    expected_hits = {
        'HOT234_1_0200m': ['HOT234_1_0200m_rep_c55158_2',
                           'HOT234_1_0200m_c10096_4'],
        'HOT238_1c_0200m': ['HOT238_1c_0200m_c3_1'],
    }
    assert len(blast_hits) == len(expected_hits)
    for reference, seq_ids in expected_hits.items():
        assert len(blast_hits[reference]) == len(seq_ids)
        for seq_id in seq_ids:
            assert seq_id in blast_hits[reference]

    # Each of the three retained rows names a distinct sequence id.
    expected_counts = {
        'HOT234_1_0200m_rep_c55158_2': 1,
        'HOT234_1_0200m_c10096_4': 1,
        'HOT238_1c_0200m_c3_1': 1,
    }
    assert len(seqid_counts) == len(expected_counts)
    for seq_id, count in expected_counts.items():
        assert seqid_counts[seq_id] == count
def test_find_sequences():
    """Results come back in FASTA-file order, regardless of query order."""
    fasta_file = io.StringIO(fasta_text)
    search_results = list(extractseqs.find_sequences(['C', 'B', 'A'], fasta_file=fasta_file))
    assert [record.id for record in search_results] == ['A', 'B', 'C']
def test_find_sequences__first():
    """A single query matching the first FASTA record is found."""
    fasta_file = io.StringIO(fasta_text)
    search_results = list(extractseqs.find_sequences(['A'], fasta_file=fasta_file))
    assert [record.id for record in search_results] == ['A']
def test_find_sequences__middle():
    """A single query matching a middle FASTA record is found."""
    fasta_file = io.StringIO(fasta_text)
    search_results = list(extractseqs.find_sequences(['B'], fasta_file=fasta_file))
    assert [record.id for record in search_results] == ['B']
def test_find_sequences__last():
    """A single query matching the last FASTA record is found."""
    fasta_file = io.StringIO(fasta_text)
    search_results = list(extractseqs.find_sequences(['C'], fasta_file=fasta_file))
    assert [record.id for record in search_results] == ['C']
def test_parse_blast_output_filename__contigs():
    """A '-contigs.tab' path splits into input name and 'contigs' type."""
    parsed = extractseqs.parse_muscope_blast_output_filename('/some/dir/test_HOT224_1_0025m.fa-contigs.tab')
    assert parsed == ('test_HOT224_1_0025m.fa', 'contigs')
def test_parse_blast_output_filename__genes():
    """A '-genes.tab' path splits into input name and 'genes' type."""
    parsed = extractseqs.parse_muscope_blast_output_filename('/some/dir/test_HOT224_1_0025m.fa-genes.tab')
    assert parsed == ('test_HOT224_1_0025m.fa', 'genes')
def test_parse_blast_output_filename__proteins():
    """A '-proteins.tab' path splits into input name and 'proteins' type."""
    parsed = extractseqs.parse_muscope_blast_output_filename('/some/dir/test_HOT224_1_0025m.fa-proteins.tab')
    assert parsed == ('test_HOT224_1_0025m.fa', 'proteins')
| 39.349057
| 134
| 0.766003
| 687
| 4,171
| 4.213974
| 0.116448
| 0.037306
| 0.058031
| 0.029016
| 0.899136
| 0.882556
| 0.829706
| 0.813126
| 0.765112
| 0.723661
| 0
| 0.166576
| 0.117718
| 4,171
| 105
| 135
| 39.72381
| 0.620109
| 0
| 0
| 0.303797
| 0
| 0.088608
| 0.308319
| 0.230161
| 0
| 0
| 0
| 0
| 0.481013
| 1
| 0.113924
| false
| 0
| 0.025316
| 0
| 0.139241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
250b71f303964187bdd615f4e289cb2b552a5a24
| 9,512
|
py
|
Python
|
engine/validation-test/disabled/test_longhorn_vm_as_service.py
|
keithalucas/longhorn-tests
|
526d88dff4fd50ebf8d690a19ba327d625d3fcd8
|
[
"Apache-2.0"
] | 10
|
2021-01-25T00:52:46.000Z
|
2022-02-20T01:49:56.000Z
|
engine/validation-test/disabled/test_longhorn_vm_as_service.py
|
keithalucas/longhorn-tests
|
526d88dff4fd50ebf8d690a19ba327d625d3fcd8
|
[
"Apache-2.0"
] | 273
|
2019-06-12T17:43:49.000Z
|
2022-03-29T09:06:02.000Z
|
engine/validation-test/disabled/test_longhorn_vm_as_service.py
|
keithalucas/longhorn-tests
|
526d88dff4fd50ebf8d690a19ba327d625d3fcd8
|
[
"Apache-2.0"
] | 24
|
2019-06-12T04:03:00.000Z
|
2022-03-21T08:08:47.000Z
|
from common_fixtures import * # NOQA
def test_createVMService_with_root_on_longhorn(super_client, client):
    """Create a VM service whose ROOT disk is backed by longhorn and verify
    the VM runs and its ROOT disk is writable.

    All helpers (createVMService, get_service_vm_list, validate_writes,
    delete_all, delete_vm_volumes, ...) come from the star import of
    common_fixtures — their contracts are assumed from usage here.
    """
    port = 6080
    # ROOT disk only; no extra data disk.
    env, service, con = createVMService(
        super_client, client, "root",
        str(port), root_disk=True, data_disk=False)
    vms = get_service_vm_list(super_client, service)
    assert len(vms) == 1
    for vm in vms:
        vm_host = get_host_for_vm(client, vm)
        assert vm.state == "running"
        # Write/read a file on the ROOT disk over SSH on `port`.
        validate_writes(vm_host, port, is_root=True)
    # Cleanup: remove the stack, then the longhorn volumes of the VM.
    delete_all(client, [env])
    delete_vm_volumes(client, vms[0], service)
def test_createVM_with_root_on_longhorn_stop(super_client, client):
    """Stop a longhorn-ROOT VM and verify the service restarts it with the
    previously written ROOT-disk data intact."""
    port = 6081
    # Create VM with ROOT disk using longhorn driver
    env, service, con = createVMService(
        super_client, client, "root-stop",
        str(port), root_disk=True, data_disk=False)
    vms = get_service_vm_list(super_client, service)
    assert len(vms) == 1
    for vm in vms:
        vm_host = get_host_for_vm(client, vm)
        assert vm.state == "running"
        # Validate that we are able to read/write to ROOT disk
        file_name, content = validate_writes(
            vm_host, port, is_root=True)
        # Stop VM (sync first so the written file reaches the disk)
        exec_ssh_cmd(vm_host, port, "sync")
        vm = client.wait_success(vm.stop())
        # Wait for VM to start
        wait_for_vm_scale_to_adjust(super_client, service)
        vm = client.reload(vm)
        assert vm.state == "running"
        # Give the guest OS time to finish booting before SSH-ing in.
        time.sleep(TIME_TO_BOOT_IN_SEC)
        # Validate access to existing file in ROOT disk
        assert read_data(vm_host, port, ROOT_DIR, file_name) == content
        # Validate reads/writes to ROOT disk
        validate_writes(vm_host, port, is_root=True)
    delete_all(client, [env])
    delete_vm_volumes(client, vms[0], service)
def test_createVM_with_root_on_longhorn_delete(super_client, client):
    """Delete a longhorn-ROOT VM and verify the service recreates it with
    the previously written ROOT-disk data intact."""
    port = 6082
    # Create VM with ROOT disk using longhorn driver
    env, service, con = createVMService(
        super_client, client, "root-delete",
        str(port), root_disk=True, data_disk=False)
    vms = get_service_vm_list(super_client, service)
    assert len(vms) == 1
    for vm in vms:
        vm_host = get_host_for_vm(client, vm)
        assert vm.state == "running"
        # Validate that we are able to read/write to ROOT disk
        file_name, content = validate_writes(
            vm_host, port, is_root=True)
        # Delete VM (sync first so the written file reaches the disk)
        exec_ssh_cmd(vm_host, port, "sync")
        vm = client.wait_success(client.delete(vm))
        assert vm.state == 'removed'
        # Wait for VM to be recreated
        wait_for_vm_scale_to_adjust(super_client, service)
        # Re-query by name to pick up the replacement VM and its host.
        vms = client.list_virtual_machine(
            name=vm.name,
            include="hosts",
            removed_null=True)
        assert len(vms) == 1
        vm = vms[0]
        vm_host = vms[0].hosts[0]
        time.sleep(TIME_TO_BOOT_IN_SEC)
        # Validate access to existing file in ROOT disk
        assert read_data(vm_host, port, ROOT_DIR, file_name) == content
        # Validate reads/writes to ROOT disk
        validate_writes(vm_host, port, is_root=True)
    delete_all(client, [env])
    delete_vm_volumes(client, vms[0], service)
def test_createVM_with_root_and_data_on_longhorn(super_client, client):
    """Create a VM with both ROOT and DATA disks on longhorn and verify
    both disks are writable."""
    port = 6083
    env, service, con = createVMService(
        super_client, client, "root-data",
        str(port), root_disk=True, data_disk=True)
    vms = get_service_vm_list(super_client, service)
    assert len(vms) == 1
    for vm in vms:
        assert vm.state == "running"
        vm_host = get_host_for_vm(client, vm)
        # is_root selects which disk validate_writes exercises.
        validate_writes(vm_host, port, is_root=True)
        validate_writes(vm_host, port, is_root=False)
    delete_all(client, [env])
    delete_vm_volumes(client, vms[0], service)
def test_createVM_with_root_and_data_on_longhorn_stop_start(
        super_client, client):
    """Stop/start a VM with ROOT and DATA disks on longhorn and verify the
    data written to both disks survives the restart."""
    port = 6084
    env, service, con = createVMService(
        super_client, client, "root-data-stop",
        str(port), root_disk=True, data_disk=True)
    vms = get_service_vm_list(super_client, service)
    assert len(vms) == 1
    for vm in vms:
        vm_host = get_host_for_vm(client, vm)
        assert vm.state == "running"
        # Validate that we are able to read/write to ROOT disk
        file_name_r, content_r = validate_writes(
            vm_host, port, is_root=True)
        file_name_d, content_d = validate_writes(
            vm_host, port, is_root=False)
        # Stop VM (sync so both files reach their disks first)
        exec_ssh_cmd(vm_host, port, "sync")
        vm = client.wait_success(vm.stop())
        # Wait for VM to start
        wait_for_vm_scale_to_adjust(super_client, service)
        vm = client.reload(vm)
        assert vm.state == "running"
        time.sleep(TIME_TO_BOOT_IN_SEC)
        # Validate access to existing file in ROOT and DATA disk
        assert read_data(vm_host, port, ROOT_DIR, file_name_r) == content_r
        # DATA disk must be re-mounted after restart before checking.
        mount_data_dir_check_file(vm_host, port, file_name_d, content_d)
        # Validate writes to ROOT disk
        validate_writes(vm_host, port, is_root=True)
        validate_writes(vm_host, port, is_root=False)
    delete_all(client, [env])
    delete_vm_volumes(client, vms[0], service)
def test_createVM_with_root_and_data_on_longhorn_delete(super_client, client):
    """Delete a VM with ROOT and DATA disks on longhorn and verify the
    recreated VM still sees the data written to both disks."""
    port = 6085
    env, service, con = createVMService(
        super_client, client, "root-data-delete",
        str(port), root_disk=True, data_disk=True)
    vms = get_service_vm_list(super_client, service)
    assert len(vms) == 1
    for vm in vms:
        vm_host = get_host_for_vm(client, vm)
        assert vm.state == "running"
        file_name_r, content_r = validate_writes(
            vm_host, port, is_root=True)
        file_name_d, content_d = validate_writes(
            vm_host, port, is_root=False)
        exec_ssh_cmd(vm_host, port, "sync")
        # Delete VM
        # NOTE(review): this second "sync" duplicates the one above —
        # harmless, but probably a copy/paste leftover.
        exec_ssh_cmd(vm_host, port, "sync")
        vm = client.wait_success(client.delete(vm))
        assert vm.state == 'removed'
        # Wait for VM to be recreated
        wait_for_vm_scale_to_adjust(super_client, service)
        vms = client.list_virtual_machine(
            name=vm.name,
            include="hosts",
            removed_null=True)
        assert len(vms) == 1
        vm = vms[0]
        vm_host = vms[0].hosts[0]
        time.sleep(TIME_TO_BOOT_IN_SEC)
        assert read_data(vm_host, port, ROOT_DIR, file_name_r) == content_r
        # Recreate the mount point, then mount DATA and verify its file.
        make_dir(vm_host, port, DATA_DIR, False)
        mount_data_dir_check_file(vm_host, port, file_name_d, content_d)
        validate_writes(vm_host, port, is_root=True)
        validate_writes(vm_host, port, is_root=False)
    delete_all(client, [env])
    delete_vm_volumes(client, vms[0], service)
def test_createVM_with_root_and_data_on_longhorn_ha(super_client, client):
    """HA scenario: deactivate the VM's host and delete the VM; the service
    must recreate it on another host with ROOT and DATA data intact."""
    port = 6086
    env, service, con = createVMService(
        super_client, client, "ha",
        str(port), root_disk=True, data_disk=True)
    vms = get_service_vm_list(super_client, service)
    assert len(vms) == 1
    for vm in vms:
        vm_host = get_host_for_vm(client, vm)
        assert vm.state == "running"
        file_name_r, content_r = validate_writes(
            vm_host, port, is_root=True)
        file_name_d, content_d = validate_writes(vm_host, port, is_root=False)
        exec_ssh_cmd(vm_host, port, "sync")
        # Deactivate Host
        vm_host = client.wait_success(vm_host.deactivate())
        assert vm_host.state == 'inactive'
        # Delete VM
        exec_ssh_cmd(vm_host, port, "sync")
        vm = client.wait_success(client.delete(vm))
        assert vm.state == 'removed'
        # Wait until the old VM is fully purged before looking for the new one.
        wait_for_condition(
            super_client, vm,
            lambda x: x.state == "purged",
            lambda x: 'State is: ' + x.state)
        # Wait for VM to be recreated
        wait_for_vm_scale_to_adjust(super_client, service)
        vms = client.list_virtual_machine(
            name=vm.name,
            include="hosts",
            removed_null=True)
        assert len(vms) == 1
        vm = vms[0]
        # Replacement must land on a different (still-active) host.
        new_vm_host = vms[0].hosts[0]
        time.sleep(TIME_TO_BOOT_IN_SEC)
        assert \
            read_data(new_vm_host, port, ROOT_DIR, file_name_r) == content_r
        make_dir(new_vm_host, port, DATA_DIR, False)
        mount_data_dir_check_file(new_vm_host, port, file_name_d, content_d)
        validate_writes(new_vm_host, port, is_root=True)
        validate_writes(new_vm_host, port, is_root=False)
        # Activate Host (restore the environment for subsequent tests)
        vm_host = client.wait_success(vm_host.activate())
        assert vm_host.state == 'active'
    delete_all(client, [env])
    delete_vm_volumes(client, vms[0], service)
def test_createVM_with_root_and_data_on_longhorn_multiple(
        super_client, client):
    """Create several independent VM services in sequence (one per port)
    and verify each VM's ROOT and DATA disks are writable."""
    ports = [6088, 6089, 6090, 6091]
    scale = 1  # VMs per service
    for port in ports:
        env, service, con = createVMService(
            super_client, client, "multiple",
            str(port), root_disk=True, data_disk=True,
            scale=scale)
        vms = get_service_vm_list(super_client, service)
        assert len(vms) == scale
        for vm in vms:
            assert vm.state == "running"
            assert vm.healthState == "healthy"
            vm_host = get_host_for_vm(client, vm)
            validate_writes(vm_host, port, is_root=True)
            validate_writes(vm_host, port, is_root=False)
        # Tear down this service before creating the next one.
        delete_all(client, [service])
        for vm in vms:
            delete_vm_volumes(client, vm, service)
| 34.842491
| 78
| 0.64508
| 1,354
| 9,512
| 4.220089
| 0.084195
| 0.057753
| 0.066503
| 0.044102
| 0.913896
| 0.913896
| 0.90392
| 0.878719
| 0.831117
| 0.80329
| 0
| 0.010108
| 0.261564
| 9,512
| 272
| 79
| 34.970588
| 0.803388
| 0.074117
| 0
| 0.751244
| 0
| 0
| 0.027784
| 0
| 0
| 0
| 0
| 0
| 0.159204
| 1
| 0.039801
| false
| 0
| 0.004975
| 0
| 0.044776
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
254e4ffdcbef92fba8bae49a7e9029f8100956d3
| 1,784
|
py
|
Python
|
lut2d/test_data.py
|
martinxyz/pixelcrawl
|
e1218be20ec2fb65ab577b366f54546b59db5854
|
[
"MIT"
] | 4
|
2019-05-24T13:31:43.000Z
|
2019-05-26T08:50:21.000Z
|
lut2d/test_data.py
|
martinxyz/pixelcrawl
|
e1218be20ec2fb65ab577b366f54546b59db5854
|
[
"MIT"
] | 1
|
2019-10-31T02:21:45.000Z
|
2019-10-31T02:21:46.000Z
|
lut2d/test_data.py
|
martinxyz/pixelcrawl
|
e1218be20ec2fb65ab577b366f54546b59db5854
|
[
"MIT"
] | 1
|
2019-05-24T13:32:10.000Z
|
2019-05-24T13:32:10.000Z
|
from numpy import array
# 512-entry (2**9) binary lookup table used as a test fixture —
# presumably one output bit per 3x3 binary-neighborhood index
# (TODO confirm against the lut2d consumer).
test_lut = \
array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0,
       1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0,
       0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1,
       1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0,
       0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0,
       0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1,
       1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1,
       1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0,
       0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0,
       0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0,
       0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0,
       1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
       1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1,
       1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0,
       0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1,
       1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1], dtype='uint8')
# Freeze the fixture so tests cannot accidentally mutate shared data.
test_lut.flags.writeable = False
| 63.714286
| 75
| 0.326233
| 526
| 1,784
| 1.102662
| 0.024715
| 0.689655
| 0.791379
| 0.77931
| 0.882759
| 0.882759
| 0.882759
| 0.875862
| 0.875862
| 0.865517
| 0
| 0.463834
| 0.380045
| 1,784
| 27
| 76
| 66.074074
| 0.060579
| 0
| 0
| 0
| 0
| 0
| 0.002803
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.038462
| 0
| 0.038462
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
c25885dd356e2c0ea2392fa480fbd63cd31412b6
| 33
|
py
|
Python
|
cases/tilde.py
|
minakoyang/YY_python2.7_interpreter_in_CPP
|
e949f4bbd27752e6dbfef0a887d9567345d512f4
|
[
"MIT"
] | 1
|
2019-04-30T16:27:19.000Z
|
2019-04-30T16:27:19.000Z
|
cases/tilde.py
|
minakoyang/YY_python2.7_interpreter_in_CPP
|
e949f4bbd27752e6dbfef0a887d9567345d512f4
|
[
"MIT"
] | null | null | null |
cases/tilde.py
|
minakoyang/YY_python2.7_interpreter_in_CPP
|
e949f4bbd27752e6dbfef0a887d9567345d512f4
|
[
"MIT"
] | null | null | null |
# Python 2 interpreter test case for the unary bitwise-NOT operator.
# For ints, ~x == -(x + 1).
print ~10  # -11
print ~-10  # 9: ~(-10)
print ~~10  # 10: double inversion is the identity
| 8.25
| 11
| 0.636364
| 6
| 33
| 3.5
| 0.333333
| 1
| 1.142857
| 1.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 0.212121
| 33
| 3
| 12
| 11
| 0.576923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
c28b589c07249789a4a227e83af2e3e0f32da148
| 20,853
|
py
|
Python
|
docusign_esign/apis/signature_api.py
|
joekohlsdorf/docusign-esign-python-client
|
40407544f79c88716d36fabf36f65c3ef1a5c3ba
|
[
"MIT"
] | 58
|
2017-10-18T23:06:57.000Z
|
2021-04-15T23:14:58.000Z
|
docusign_esign/apis/signature_api.py
|
joekohlsdorf/docusign-esign-python-client
|
40407544f79c88716d36fabf36f65c3ef1a5c3ba
|
[
"MIT"
] | 49
|
2017-10-27T05:54:09.000Z
|
2021-04-29T22:06:17.000Z
|
docusign_esign/apis/signature_api.py
|
joekohlsdorf/docusign-esign-python-client
|
40407544f79c88716d36fabf36f65c3ef1a5c3ba
|
[
"MIT"
] | 49
|
2017-09-16T07:23:41.000Z
|
2021-05-07T20:21:20.000Z
|
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..client.configuration import Configuration
from ..client.api_client import ApiClient
class SignatureApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API to *api_client*, or fall back to the shared one.

    When no client is supplied, the lazily-created client stored on the
    global Configuration is used (and created on first access).
    """
    if api_client:
        self.api_client = api_client
        return
    config = Configuration()
    if not config.api_client:
        config.api_client = ApiClient()
    self.api_client = config.api_client
def complete_sign_hash(self, **kwargs):
    """
    Complete Sign Hash.

    Synchronous by default; pass a `callback` function to make the request
    asynchronous, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param CompleteSignRequest complete_sign_request:
    :return: CompleteSignHashResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # With '_return_http_data_only' set, the *_with_http_info variant
    # already returns the bare response data on the synchronous path and
    # the request thread on the callback path, so both paths reduce to a
    # single delegating call.
    kwargs['_return_http_data_only'] = True
    return self.complete_sign_hash_with_http_info(**kwargs)
def complete_sign_hash_with_http_info(self, **kwargs):
    """
    Complete Sign Hash (full HTTP info variant).

    Synchronous by default; pass a `callback` function to make the request
    asynchronous, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param CompleteSignRequest complete_sign_request:
    :return: CompleteSignHashResponse
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on any unrecognised keyword argument.
    """
    accepted = ['complete_sign_request', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    params = {}
    for key, val in iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method complete_sign_hash" % key
            )
        params[key] = val

    resource_path = '/v2.1/signature/completesignhash'.replace('{format}', 'json')
    # Request body is the optional CompleteSignRequest.
    body_params = params.get('complete_sign_request')
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }

    return self.api_client.call_api(resource_path, 'POST',
                                    {},    # path_params
                                    {},    # query_params
                                    header_params,
                                    body=body_params,
                                    post_params=[],
                                    files={},
                                    response_type='CompleteSignHashResponse',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats={})
def get_user_info(self, **kwargs):
    """
    Get User Info To Sign Document.

    Synchronous by default; pass a `callback` function to make the request
    asynchronous, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: UserInfoResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant; with
    # '_return_http_data_only' set it returns the bare response data
    # (or the request thread when a callback was supplied).
    kwargs['_return_http_data_only'] = True
    return self.get_user_info_with_http_info(**kwargs)
def get_user_info_with_http_info(self, **kwargs):
    """
    Get User Info To Sign Document (full HTTP info variant).

    Synchronous by default; pass a `callback` function to make the request
    asynchronous, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: UserInfoResponse
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on any unrecognised keyword argument.
    """
    accepted = ['callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {}
    for key, val in iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user_info" % key
            )
        params[key] = val

    resource_path = '/v2.1/signature/userInfo'.replace('{format}', 'json')
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }

    return self.api_client.call_api(resource_path, 'GET',
                                    {},    # path_params
                                    {},    # query_params
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='UserInfoResponse',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats={})
def health_check(self, **kwargs):
    """
    Report status from the TSP to DocuSign.

    Synchronous by default; pass a `callback` function to make the request
    asynchronous, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param TspHealthCheckRequest tsp_health_check_request:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant; with
    # '_return_http_data_only' set it returns the bare response data
    # (or the request thread when a callback was supplied).
    kwargs['_return_http_data_only'] = True
    return self.health_check_with_http_info(**kwargs)
def health_check_with_http_info(self, **kwargs):
    """
    Report status from the TSP to DocuSign
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.health_check_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param TspHealthCheckRequest tsp_health_check_request:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts.
    all_params = [
        'tsp_health_check_request',
        'callback',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method health_check" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    resource_path = '/v2.1/signature/healthcheck'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}

    # The health-check payload, when supplied, becomes the request body.
    body_params = params.get('tsp_health_check_request')

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def sign_hash_session_info(self, **kwargs):
    """
    Get Signature Session Info To Sign Document Hash
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.sign_hash_session_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param SignSessionInfoRequest sign_session_info_request:
    :return: SignHashSessionInfoResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always request just the response data from the lower-level call.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: the underlying call returns the request thread.
        return self.sign_hash_session_info_with_http_info(**kwargs)
    # Synchronous path: return the response data directly.
    data = self.sign_hash_session_info_with_http_info(**kwargs)
    return data
def sign_hash_session_info_with_http_info(self, **kwargs):
    """
    Get Signature Session Info To Sign Document Hash
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.sign_hash_session_info_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param SignSessionInfoRequest sign_session_info_request:
    :return: SignHashSessionInfoResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts.
    all_params = [
        'sign_session_info_request',
        'callback',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method sign_hash_session_info" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    resource_path = '/v2.1/signature/signhashsessioninfo'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}

    # The session-info request, when supplied, becomes the request body.
    body_params = params.get('sign_session_info_request')

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SignHashSessionInfoResponse',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_transaction(self, **kwargs):
    """
    Report an error from the tsp to docusign
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_transaction(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param UpdateTransactionRequest update_transaction_request:
    :return: UpdateTransactionResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always request just the response data from the lower-level call.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: the underlying call returns the request thread.
        return self.update_transaction_with_http_info(**kwargs)
    # Synchronous path: return the response data directly.
    data = self.update_transaction_with_http_info(**kwargs)
    return data
def update_transaction_with_http_info(self, **kwargs):
    """
    Report an error from the tsp to docusign
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_transaction_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param UpdateTransactionRequest update_transaction_request:
    :return: UpdateTransactionResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts.
    all_params = [
        'update_transaction_request',
        'callback',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_transaction" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    resource_path = '/v2.1/signature/updatetransaction'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}

    # The update-transaction request, when supplied, becomes the body.
    body_params = params.get('update_transaction_request')

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UpdateTransactionResponse',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 39.56926
| 140
| 0.563756
| 2,007
| 20,853
| 5.590932
| 0.08869
| 0.071295
| 0.024953
| 0.032083
| 0.910258
| 0.896979
| 0.88067
| 0.852687
| 0.837002
| 0.830051
| 0
| 0.001353
| 0.361818
| 20,853
| 526
| 141
| 39.644487
| 0.841824
| 0.319954
| 0
| 0.733591
| 0
| 0
| 0.146366
| 0.067392
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042471
| false
| 0
| 0.027027
| 0
| 0.131274
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6c440bf7f655d1577be0f1076b9498d88ec4ee7f
| 119
|
py
|
Python
|
torchsupport/flex/tasks/energy/__init__.py
|
coding-ecoli/torchsupport
|
a8de0e8db4f4b9cfd4ac93c0fa6923fe9151b309
|
[
"MIT"
] | 18
|
2019-05-02T16:32:15.000Z
|
2021-04-16T09:33:54.000Z
|
torchsupport/flex/tasks/energy/__init__.py
|
coding-ecoli/torchsupport
|
a8de0e8db4f4b9cfd4ac93c0fa6923fe9151b309
|
[
"MIT"
] | 5
|
2019-10-14T13:46:49.000Z
|
2021-06-08T11:48:34.000Z
|
torchsupport/flex/tasks/energy/__init__.py
|
coding-ecoli/torchsupport
|
a8de0e8db4f4b9cfd4ac93c0fa6923fe9151b309
|
[
"MIT"
] | 12
|
2019-05-12T21:34:24.000Z
|
2021-07-15T14:14:16.000Z
|
from torchsupport.flex.tasks.energy.density_ratio import *
from torchsupport.flex.tasks.energy.score_matching import *
| 39.666667
| 59
| 0.848739
| 16
| 119
| 6.1875
| 0.625
| 0.323232
| 0.40404
| 0.505051
| 0.626263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067227
| 119
| 2
| 60
| 59.5
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6c63c286c6ddfcb1821838a7532ca5102ed7fe77
| 13,091
|
py
|
Python
|
tools/cardiac_py/experiments/fourteen.py
|
paulkefer/cardioid
|
59c07b714d8b066b4f84eb50487c36f6eadf634c
|
[
"MIT-0",
"MIT"
] | 33
|
2018-12-12T20:05:06.000Z
|
2021-09-26T13:30:16.000Z
|
tools/cardiac_py/experiments/fourteen.py
|
paulkefer/cardioid
|
59c07b714d8b066b4f84eb50487c36f6eadf634c
|
[
"MIT-0",
"MIT"
] | 5
|
2019-04-25T11:34:43.000Z
|
2021-11-14T04:35:37.000Z
|
tools/cardiac_py/experiments/fourteen.py
|
paulkefer/cardioid
|
59c07b714d8b066b4f84eb50487c36f6eadf634c
|
[
"MIT-0",
"MIT"
] | 15
|
2018-12-21T22:44:59.000Z
|
2021-08-29T10:30:25.000Z
|
'''
Created on 08/01/2013
@author: butler
'''
from . import ECG_plots
import os
class Experiment():
    """Plot driver for the ECG data of experiment 14.

    Each ``plot_*`` method configures an ``ECG_plots.PlotECGs`` instance
    for a pair of run directories (a "normal" and a "modified" run) and
    writes one or more figure files to disk.

    Fixes over the previous version: the Python-2-only ``print``
    statements are converted to the function form (same textual output),
    the nine-line plotter-setup boilerplate repeated in every method is
    factored into :meth:`_build_plotter`, and legend strings containing
    ``\\Delta`` are written as raw strings to avoid invalid-escape
    warnings.
    """

    def __init__(self, root_dir="/Users/butler/Desktop/CARDIOID/ECG_Data"):
        # Root directory that contains the per-experiment data folders.
        self.root_dir = root_dir
        self.experiment_dir = "experiment_14"
        # Pacing period in milliseconds.
        self.period_ms = 2000
        # Time (ms) at which the stimulus is applied.
        self.stimulus_time_ms = 20510

    # ------------------------------------------------------------------
    # internal helpers
    # ------------------------------------------------------------------
    def _build_plotter(self, normal_dir, normal_legend,
                       modified_dir, modified_legend,
                       use_period=False, set_lead=True):
        """Create a PlotECGs configured for one pair of runs.

        use_period: copy ``self.period_ms`` onto the plotter before
            loading the data (attribute-set order before ``load_data()``
            is assumed irrelevant — plain assignments in the original).
        set_lead: select the default trace (``set_ECG_type(0, -1)``)
            after loading; the all-leads scan selects leads itself.
        """
        experiment_dir = self.root_dir + os.sep + self.experiment_dir
        plotter = ECG_plots.PlotECGs(experiment_dir)
        if use_period:
            plotter.period_ms = self.period_ms
        plotter.normal_dir_name = normal_dir
        plotter.normal_legend = normal_legend
        plotter.modified_dir_name = modified_dir
        plotter.modified_legend = modified_legend
        plotter.load_data()
        if set_lead:
            plotter.set_ECG_type(0, -1)
        return plotter

    def _report_runs(self, plotter):
        """Echo which run directories the previous figure came from."""
        print('This was for runs: ', plotter.normal_dir_name)
        print('and: ', plotter.modified_dir_name)

    # ------------------------------------------------------------------
    # plots
    # ------------------------------------------------------------------
    def plot_ECGs_for_object_data_set_g_NaL(self):
        """Reentry trigger with & without I_Kr block; g_NaL set via the
        object.data clone properties."""
        plotter = self._build_plotter("run_5", "$I_{Kr} = 0.15$",
                                      "run_6", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_full("Lead_1_object_data_set_g_NaL.eps")

    def plot_ECGs_for_binary_set_g_NaL(self):
        """Reentry trigger with & without I_Kr block; g_NaL set in
        source code only."""
        plotter = self._build_plotter("run_9", "$I_{Kr} = 0.15$",
                                      "run_10", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_full("Lead_1_source_set_g_NaL.eps")

    def plot_ECG_overlay_set_beats(self):
        """Overlay two individual beats for the source-set g_NaL runs."""
        plotter = self._build_plotter("run_9", "$I_{Kr} = 0.15$",
                                      "run_10", "$I_{Kr} = 0.0$")
        # Original set this literal after loading; kept verbatim.
        plotter.period_ms = 2000
        plotter.overlay_ECG_beat("Beat_9.eps", 9)
        plotter.overlay_ECG_beat("Beat_10.eps", 10)

    def plot_ECGs_for_r1310_comparison(self):
        """Compare a precompiled-binary run against revision r1310."""
        plotter = self._build_plotter("run_10", "Precompiled",
                                      "run_13", "r1310")
        plotter.overlay_ECG_full("r1310_comparison.eps")

    def plot_fast_pacing(self):
        """Full-trace overlay for the 1 Hz pacing runs."""
        plotter = self._build_plotter("run_11", "$I_{Kr} = 0.15$",
                                      "run_12", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_full("Lead_1_1hz.eps")

    def plot_fast_pacing_single_beat(self):
        """Single-beat overlay for the 1 Hz pacing runs."""
        plotter = self._build_plotter("run_11", "$I_{Kr} = 0.15$",
                                      "run_12", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_beat("Beat_9_1_hz.eps", 9)

    def plot_ECG_0_1_mm(self):
        """0.1 mm resolution run.

        NOTE(review): both directories are "run_7" in the original —
        possibly a copy-paste slip; preserved as-is.
        """
        plotter = self._build_plotter("run_7", "$I_{Kr} = 0.00$",
                                      "run_7", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_full("Lead_1_source_set_01mm.eps")

    def plot_run_15(self):
        """Plot run 15 against itself (as in the original)."""
        plotter = self._build_plotter("run_15", "$I_{Kr} = 0.00$",
                                      "run_15", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_full("Plot_run_15.eps")

    def plot_ECG_0_1_m_old_mesh(self):
        """I_Kr comparison on the old mesh."""
        plotter = self._build_plotter("run_20", "$I_{Kr} = 0.153$",
                                      "run_21", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_full("OLD_MESH.eps")

    def plot_ECG_0_1a_m_new(self):
        """I_Kr comparison on the new 0.1 mm mesh."""
        plotter = self._build_plotter("run_22", "$I_{Kr} = 0.153$",
                                      "run_23", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_full("new_0.1mm_mesh.eps")
        self._report_runs(plotter)

    def plot_ECG_0_2_0_1_compare(self):
        """Resolution comparison: dx = 0.2 mm vs dx = 0.1 mm."""
        plotter = self._build_plotter("run_5", r"$\Delta x = 0.2$",
                                      "run_22", r"$\Delta x = 0.1$")
        plotter.overlay_plus_difference("resolution_comparison.eps")
        self._report_runs(plotter)

    def plot_ECG_0_2_0_1_compare_beat_all_leads(self):
        """Per-lead fractional-error beat plots for the resolution pair."""
        plotter = self._build_plotter("run_5", r"$\Delta x = 0.2$",
                                      "run_22", r"$\Delta x = 0.1$",
                                      use_period=True, set_lead=False)
        for lead in range(7):
            if lead == 0:
                plotter.set_ECG_type(0, -1)
            else:
                plotter.set_ECG_type(lead, 1)
            for beat in (9, 10, 11, 12):
                plotter.overlay_plus_perc_error_beat(
                    "frac_error_beat_%d_lead_%d.pdf" % (beat, lead), beat)
        self._report_runs(plotter)

    def plot_ECG_0_2_0_1_compare_error(self):
        """Resolution comparison expressed as a percentage error."""
        plotter = self._build_plotter("run_5", r"$\Delta x = 0.2$",
                                      "run_22", r"$\Delta x = 0.1$")
        plotter.overlay_plus_perc_error(
            "resolution_comparison_as_perc_error.eps")
        self._report_runs(plotter)

    def plot_1hz_vs_2hz_ikr_normal(self):
        """Pacing-frequency comparison with normal I_Kr."""
        plotter = self._build_plotter("run_5", "$0.5 Hz$",
                                      "run_11", "$1 hz$",
                                      use_period=True)
        plotter.overlay_ECG_beat("freq_comparison_ikr_normal_a.eps", 2)
        plotter.overlay_ECG_beat("freq_comparison_ikr_normal_b.eps", 3)

    def plot_1hz_vs_2hz_ikr_zeroed(self):
        """Pacing-frequency comparison with I_Kr zeroed."""
        plotter = self._build_plotter("run_6", "$0.5 Hz$",
                                      "run_12", "$1 hz$",
                                      use_period=True)
        plotter.overlay_ECG_beat("freq_comparison_ikr_zeroed_a.eps", 2)
        plotter.overlay_ECG_beat("freq_comparison_ikr_zeroed_b.eps", 3)

    def plot_rentry_timing(self):
        """Capture the reentry initiation around beat 11."""
        plotter = self._build_plotter("run_5", "$i_{kr} = 0.153$",
                                      "run_6", "$i_{kr} = 0.0$",
                                      use_period=True)
        plotter.overlay_ECG_reent("Reentry_intiation_capture.eps", 11)

    def plot_g_NaL_0_25(self):
        """I_Kr comparison with g_NaL = 0.25."""
        plotter = self._build_plotter("run_24", "$I_{Kr} = 0.153$",
                                      "run_25", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_full("Lead_g_NaL_0_25.eps")
        self._report_runs(plotter)

    def plot_g_NaL_0_15(self):
        """I_Kr comparison with g_NaL = 0.15."""
        plotter = self._build_plotter("run_26", "$I_{Kr} = 0.153$",
                                      "run_27", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_full("Lead_g_NaL_0_15.eps")
        self._report_runs(plotter)

    def plot_delayed_S2(self):
        """I_Kr comparison with a delayed S2 stimulus."""
        plotter = self._build_plotter("run_28", "$I_{Kr} = 0.153$",
                                      "run_29", "$I_{Kr} = 0.0$")
        plotter.overlay_ECG_full("Lead_1_delayed_s2.eps")
        self._report_runs(plotter)
| 42.093248
| 79
| 0.637003
| 1,815
| 13,091
| 4.246281
| 0.08595
| 0.097833
| 0.098612
| 0.067471
| 0.90048
| 0.872843
| 0.857273
| 0.853769
| 0.850655
| 0.822629
| 0
| 0.031856
| 0.263845
| 13,091
| 310
| 80
| 42.229032
| 0.767874
| 0
| 0
| 0.637931
| 0
| 0
| 0.137457
| 0.040909
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.008621
| null | null | 0.060345
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6671d4ffc65c7fa5e2b27cbf307f21d8dec0c8cc
| 24,211
|
py
|
Python
|
DQM/L1TMonitorClient/python/L1TEventInfoClient_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
DQM/L1TMonitorClient/python/L1TEventInfoClient_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
DQM/L1TMonitorClient/python/L1TEventInfoClient_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
# L1 Trigger Event Info client cfi
#
# The cfi can be used, with appropriate settings, for both L1T and L1TEMU.
# Default version in cfi: L1T event client
#
# authors previous versions - see CVS
#
# V.M. Ghete 2010-10-22 revised version of L1T DQM and L1TEMU DQM
import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDHarvester import DQMEDHarvester
# Private builders removing the copy-pasted PSet boilerplate.  They only
# construct parameter sets; the resulting l1tEventInfoClient is
# parameter-for-parameter identical to the previous hand-expanded version.

def _quality_test(name='', hist='', summary_enabled=0):
    """One quality-test PSet.

    name: quality test name; hist: full path of the tested histogram;
    summary_enabled: 1 if the test is shown in the summary plot.
    The default empty name/hist is the placeholder used for systems and
    objects with no quality tests defined.
    """
    return cms.PSet(
        QualityTestName = cms.string(name),
        QualityTestHist = cms.string(hist),
        QualityTestSummaryEnabled = cms.uint32(summary_enabled)
    )


def _system(label, hw_label, disable, quality_tests):
    """One L1 system entry.

    label: system label; hw_label: label used in the hardware-validation
    package (the package producing the ErrorFlag histogram); disable: if
    1, all quality tests for the system are disabled in the summary plot.
    """
    return cms.PSet(
        SystemLabel = cms.string(label),
        HwValLabel = cms.string(hw_label),
        SystemDisable = cms.uint32(disable),
        QualityTests = cms.VPSet(*quality_tests)
    )


def _trigger_object(label, disable, quality_tests):
    """One L1 trigger-object entry.

    label: object label as used in enum L1GtObject; disable: emulator
    mask — if 1, the object is masked in the summary plot.
    """
    return cms.PSet(
        ObjectLabel = cms.string(label),
        ObjectDisable = cms.uint32(disable),
        QualityTests = cms.VPSet(*quality_tests)
    )


l1tEventInfoClient = DQMEDHarvester("L1TEventInfoClient",
    monitorDir = cms.untracked.string("L1T"),

    # decide when to run and update the results of the quality tests
    # retrieval of quality test results must be consistent with the
    # event / LS / Run execution
    runInEventLoop=cms.untracked.bool(False),
    runInEndLumi=cms.untracked.bool(True),
    runInEndRun=cms.untracked.bool(True),
    runInEndJob=cms.untracked.bool(False),

    # the position in the parameter set gives, in reverse order, the
    # position in the reportSummaryMap in the emulator column (left column)
    L1Systems = cms.VPSet(
        _system("ECAL_TPG", "ETP", 0, [_quality_test()]),
        _system("HCAL_TPG", "HTP", 0, [_quality_test()]),
        _system("RCT", "RCT", 0, [
            _quality_test("HotChannels_RCT_2D",
                          "L1T/L1TRCT/Layer2EmIsoEmEtEtaPhi", 1),
            _quality_test("DeadChannels_RCT_2D_loose",
                          "L1T/L1TRCT/Layer2EmIsoEmEtEtaPhi", 1),
            _quality_test("HotChannels_RCT_2D",
                          "L1T/L1TRCT/Layer2EmNonIsoEmEtEtaPhi", 1),
            _quality_test("DeadChannels_RCT_2D_loose",
                          "L1T/L1TRCT/Layer2EmNonIsoEmEtEtaPhi", 1),
            _quality_test("HotChannels_RCT_2D",
                          "L1T/L1TRCT/Layer2RegionsEtEtaPhi", 1),
            _quality_test("DeadChannels_RCT_2D_tight",
                          "L1T/L1TRCT/Layer2RegionsEtEtaPhi", 1),
        ]),
        _system("Stage1Layer2", "Stage1Layer2", 0, [
            _quality_test("HotChannels_GCT_2D",
                          "L1T/L1TStage1Layer2/IsoEmRankEtaPhi", 1),
            _quality_test("DeadChannels_GCT_2D_loose",
                          "L1T/L1TStage1Layer2/IsoEmRankEtaPhi", 1),
            # NOTE(review): this third entry repeats IsoEmRankEtaPhi while
            # its "dead channels" partner below tests NonIsoEmRankEtaPhi —
            # looks like a copy-paste slip in the original; kept as-is.
            _quality_test("HotChannels_GCT_2D",
                          "L1T/L1TStage1Layer2/IsoEmRankEtaPhi", 1),
            _quality_test("DeadChannels_GCT_2D_loose",
                          "L1T/L1TStage1Layer2/NonIsoEmRankEtaPhi", 1),
            _quality_test("HotChannels_GCT_2D",
                          "L1T/L1TStage1Layer2/AllJetsEtEtaPhi", 1),
            _quality_test("DeadChannels_GCT_2D_tight",
                          "L1T/L1TStage1Layer2/AllJetsEtEtaPhi", 1),
            _quality_test("HotChannels_GCT_2D",
                          "L1T/L1TStage1Layer2/TauJetsEtEtaPhi", 1),
            _quality_test("DeadChannels_GCT_2D_tight",
                          "L1T/L1TStage1Layer2/TauJetsEtEtaPhi", 1),
        ]),
        _system("DT_TPG", "DTP", 0, [_quality_test()]),
        # FIXME what are the histograms to be tested?
        _system("DTTF", "DTF", 0, [_quality_test()]),
        _system("CSC_TPG", "CTP", 0, [_quality_test()]),
        _system("CSCTF", "CTF", 0, [
            _quality_test("DeadChannels_CSCTF_2D",
                          "L1T/L1TCSCTF/CSCTF_Chamber_Occupancies", 1),
            _quality_test("HotChannels_CSCTF_2D",
                          "L1T/L1TCSCTF/CSCTF_Chamber_Occupancies", 1),
            _quality_test("DeadChannels_CSCTF_2D",
                          "L1T/L1TCSCTF/CSCTF_occupancies", 1),
            _quality_test("HotChannels_CSCTF_2D",
                          "L1T/L1TCSCTF/CSCTF_occupancies", 1),
        ]),
        _system("RPC", "RPC", 0, [
            _quality_test("DeadChannels_RPCTF_2D",
                          "L1T/L1TRPCTF/RPCTF_muons_eta_phi_bx0", 1),
            _quality_test("HotChannels_RPCTF_2D",
                          "L1T/L1TRPCTF/RPCTF_muons_eta_phi_bx0", 1),
        ]),
        _system("GMT", "GMT", 0, [
            _quality_test("DeadChannels_GMT_2D",
                          "L1T/L1TGMT/GMT_etaphi", 1),
            _quality_test("HotChannels_GMT_2D",
                          "L1T/L1TGMT/GMT_etaphi", 1),
            _quality_test("CompareHist_GMT",
                          "L1T/L1TGMT/Regional_trigger", 1),
        ]),
        _system("GT", "GT", 0, [_quality_test()]),
    ),

    # the position in the parameter set gives, in reverse order, the
    # position in the reportSummaryMap in the trigger object column
    # (right column); every object currently has no quality tests.
    L1Objects = cms.VPSet(
        *[_trigger_object(label, 0, [_quality_test()]) for label in (
            "TechTrig", "GtExternal", "HfRingEtSums", "HfBitCounts",
            "HTM", "HTT", "ETM", "ETT", "Tau", "ForJet", "CenJet",
            "IsoEG", "NoIsoEG", "Mu",
        )]
    ),

    # fast over-mask a system: if the name of the system is in the list,
    # the system will be masked (default mask value is in L1Systems VPSet)
    DisableL1Systems = cms.vstring(),

    # fast over-mask an object: if the name of the object is in the list,
    # the object will be masked (default mask value is in L1Objects VPSet)
    DisableL1Objects = cms.vstring()
)
| 54.406742
| 103
| 0.366693
| 1,372
| 24,211
| 6.417638
| 0.144315
| 0.124702
| 0.051107
| 0.12209
| 0.797047
| 0.77774
| 0.777172
| 0.777172
| 0.777172
| 0.761045
| 0
| 0.031167
| 0.562678
| 24,211
| 444
| 104
| 54.529279
| 0.800434
| 0.077775
| 0
| 0.723785
| 0
| 0
| 0.064622
| 0.044728
| 0
| 0
| 0
| 0.002252
| 0
| 1
| 0
| false
| 0
| 0.005115
| 0
| 0.005115
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
66a1959554ef605a50e2cc90c07693d0ef307b73
| 778
|
py
|
Python
|
mypy/15/movimiento2.py
|
seldoncode/Python_CoderDojo
|
2b0e33ac517cae853af00122b14c4e5719d770c3
|
[
"MIT"
] | null | null | null |
mypy/15/movimiento2.py
|
seldoncode/Python_CoderDojo
|
2b0e33ac517cae853af00122b14c4e5719d770c3
|
[
"MIT"
] | null | null | null |
mypy/15/movimiento2.py
|
seldoncode/Python_CoderDojo
|
2b0e33ac517cae853af00122b14c4e5719d770c3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Walking stick-figure animation that drifts across the terminal.

Refactor of the original: the four near-identical frame blocks are
collapsed into a frame table, and the stick-figure backslashes are
explicitly escaped (the previous literals relied on ``"\\ "`` not being
a recognised escape sequence).
"""
import os
import time

# Each frame is (head, arms, legs); the escaped backslashes produce the
# same characters the original literals did.
FRAMES = [
    (" o ", " /|\\ ", " / | "),
    (" o ", " (|\\ ", " | | "),
    (" o ", " /|\\ ", " | \\ "),
    (" o ", " /|) ", " | | "),
]

espacios = ""
for _ in range(20):
    for head, arms, legs in FRAMES:
        time.sleep(.2)
        os.system("clear")
        print()
        print(f"{espacios}{head}")
        print(f"{espacios}{arms}")
        print(f"{espacios}{legs}")
        # Shift the figure one column right for the next frame.
        espacios += " "
| 16.913043
| 32
| 0.475578
| 86
| 778
| 4.302326
| 0.232558
| 0.194595
| 0.454054
| 0.12973
| 0.854054
| 0.854054
| 0.854054
| 0.854054
| 0.854054
| 0.854054
| 0
| 0.011257
| 0.31491
| 778
| 46
| 33
| 16.913043
| 0.682927
| 0.025707
| 0
| 0.75
| 0
| 0
| 0.311346
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.0625
| 0
| 0.0625
| 0.5
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
dd77e94baa4d8f705e9091944786f08195f36b46
| 179
|
py
|
Python
|
typings/console/intellisense.py
|
Argmaster/PyR3
|
6786bcb6a101fe4bd4cc50fe43767b8178504b15
|
[
"MIT"
] | 2
|
2021-12-12T18:51:52.000Z
|
2022-02-23T09:49:16.000Z
|
src/blender/blender_autocomplete-master/2.92/console/intellisense.py
|
JonasWard/ClayAdventures
|
a716445ac690e4792e70658319aa1d5299f9c9e9
|
[
"MIT"
] | 2
|
2021-11-08T12:09:02.000Z
|
2021-12-12T23:01:12.000Z
|
typings/console/intellisense.py
|
Argmaster/PyR3
|
6786bcb6a101fe4bd4cc50fe43767b8178504b15
|
[
"MIT"
] | null | null | null |
import sys
import typing
def complete(line, cursor, namespace, private):
    """Typings stub for console completion.

    Placeholder kept only for its signature; performs no work and
    always returns None.
    """
    return None
def expand(line, cursor, namespace, private):
    """Typings stub for console expansion.

    Placeholder kept only for its signature; performs no work and
    always returns None.
    """
    return None
| 9.421053
| 47
| 0.564246
| 18
| 179
| 5.611111
| 0.611111
| 0.19802
| 0.376238
| 0.514851
| 0.594059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.301676
| 179
| 18
| 48
| 9.944444
| 0.808
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
dd9568f59983fa2ea9af3d192ae665e564b47805
| 779
|
py
|
Python
|
date_difference.py
|
Matusf/One-Liners
|
93ebdffea93137b812ac29dd8e409931c7592f53
|
[
"MIT"
] | null | null | null |
date_difference.py
|
Matusf/One-Liners
|
93ebdffea93137b812ac29dd8e409931c7592f53
|
[
"MIT"
] | null | null | null |
date_difference.py
|
Matusf/One-Liners
|
93ebdffea93137b812ac29dd8e409931c7592f53
|
[
"MIT"
] | null | null | null |
'''
Snippet that displays the difference (in days) between two given dates.

Input:
    date1 (list): First date in format <year> <month> <day>
    date2 (list): Second date in format <year> <month> <day>

Computes:
    int: The difference between the given dates

Expanded form:
    print('Difference [days]: {}'.format(abs(
        (__import__('datetime').date(
            *[int(i) for i in input('<year> <month> <day>: ').split()]) -
        __import__('datetime').date(
            *[int(i) for i in input('<year> <month> <day>: ').split()])).days)))
'''
# One-liner equivalent of the expanded form above: read two dates from stdin,
# build a datetime.date from each, and print |date1 - date2| in whole days.
# __import__('datetime') avoids a separate import statement so the whole
# snippet stays a single expression.
print('Difference [days]: {}'.format(abs((__import__('datetime').date(*[int(i) for i in input('<year> <month> <day>: ').split()]) - __import__('datetime').date(*[int(i) for i in input('<year> <month> <day>: ').split()])).days)))
| 41
| 228
| 0.602054
| 102
| 779
| 4.441176
| 0.313725
| 0.119205
| 0.15894
| 0.18543
| 0.728477
| 0.728477
| 0.728477
| 0.728477
| 0.582781
| 0.582781
| 0
| 0.00313
| 0.179718
| 779
| 18
| 229
| 43.277778
| 0.70579
| 0.69448
| 0
| 0
| 0
| 0
| 0.352174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
6600d17cc920e594b871497db94e8af41408c680
| 15,572
|
py
|
Python
|
argocd_python_client/api/certificate_service_api.py
|
RyanSiu1995/argocd-python-client
|
2e8f097fe09f247a46ac70692241a93d1acd076a
|
[
"MIT"
] | 1
|
2021-11-20T13:37:43.000Z
|
2021-11-20T13:37:43.000Z
|
argocd_python_client/api/certificate_service_api.py
|
RyanSiu1995/argocd-python-client
|
2e8f097fe09f247a46ac70692241a93d1acd076a
|
[
"MIT"
] | null | null | null |
argocd_python_client/api/certificate_service_api.py
|
RyanSiu1995/argocd-python-client
|
2e8f097fe09f247a46ac70692241a93d1acd076a
|
[
"MIT"
] | null | null | null |
"""
Consolidate Services
Description of all APIs # noqa: E501
The version of the OpenAPI document: version not set
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from argocd_python_client.api_client import ApiClient, Endpoint as _Endpoint
from argocd_python_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from argocd_python_client.model.runtime_error import RuntimeError
from argocd_python_client.model.v1alpha1_repository_certificate_list import V1alpha1RepositoryCertificateList
class CertificateServiceApi(object):
    """Client for the ArgoCD certificate service endpoints.

    NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    def __init__(self, api_client=None):
        # Fall back to a default client so the API is usable without
        # explicit configuration.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

        # NOTE(fix): in the flattened source the _Endpoint assignments
        # appeared after `return` statements at function level (unreachable).
        # The generator's canonical structure — endpoint callables as
        # closures nested in __init__, each followed by its _Endpoint
        # wrapper assignment — is restored here.

        def __certificate_service_create_certificate(
            self,
            body,
            **kwargs
        ):
            """Creates repository certificates on the server  # noqa: E501

            This method makes a synchronous HTTP request by default. To make an
            asynchronous HTTP request, please pass async_req=True

            >>> thread = api.certificate_service_create_certificate(body, async_req=True)
            >>> result = thread.get()

            Args:
                body (V1alpha1RepositoryCertificateList): List of certificates to be created

            Keyword Args:
                upsert (bool): Whether to upsert already existing certificates.. [optional]
                async_req (bool): execute request asynchronously
                _return_http_data_only (bool): response data without head status
                    code and headers. Default is True.
                _preload_content (bool): if False, the urllib3.HTTPResponse object
                    will be returned without reading/decoding response data.
                    Default is True.
                _request_timeout (int/float/tuple): timeout setting for this request.
                    A single number is the total timeout; a (connection, read)
                    tuple sets both. Default is None.
                _check_input_type (bool): type-check data sent to the server.
                    Default is True.
                _check_return_type (bool): type-check data received from the
                    server. Default is True.
                _host_index (int/None): index of the server to use.
                    Default is read from the configuration.

            Returns:
                V1alpha1RepositoryCertificateList.
                If the method is called asynchronously, returns the request
                thread.
            """
            # Fill in defaults for the standard per-call options.
            kwargs['async_req'] = kwargs.get('async_req', False)
            kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
            kwargs['_preload_content'] = kwargs.get('_preload_content', True)
            kwargs['_request_timeout'] = kwargs.get('_request_timeout', None)
            kwargs['_check_input_type'] = kwargs.get('_check_input_type', True)
            kwargs['_check_return_type'] = kwargs.get('_check_return_type', True)
            kwargs['_host_index'] = kwargs.get('_host_index')
            kwargs['body'] = body
            return self.call_with_http_info(**kwargs)

        # POST /api/v1/certificates — create certificates.
        self.certificate_service_create_certificate = _Endpoint(
            settings={
                'response_type': (V1alpha1RepositoryCertificateList,),
                'auth': [],
                'endpoint_path': '/api/v1/certificates',
                'operation_id': 'certificate_service_create_certificate',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'body',
                    'upsert',
                ],
                'required': [
                    'body',
                ],
                'nullable': [],
                'enum': [],
                'validation': []
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'body': (V1alpha1RepositoryCertificateList,),
                    'upsert': (bool,),
                },
                'attribute_map': {
                    'upsert': 'upsert',
                },
                'location_map': {
                    'body': 'body',
                    'upsert': 'query',
                },
                'collection_format_map': {}
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client,
            callable=__certificate_service_create_certificate
        )

        def __certificate_service_delete_certificate(
            self,
            **kwargs
        ):
            """Delete the certificates that match the RepositoryCertificateQuery  # noqa: E501

            This method makes a synchronous HTTP request by default. To make an
            asynchronous HTTP request, please pass async_req=True

            >>> thread = api.certificate_service_delete_certificate(async_req=True)
            >>> result = thread.get()

            Keyword Args:
                host_name_pattern (str): A file-glob pattern (not regular expression) the host name has to match.. [optional]
                cert_type (str): The type of the certificate to match (ssh or https).. [optional]
                cert_sub_type (str): The sub type of the certificate to match (protocol dependent, usually only used for ssh certs).. [optional]
                async_req (bool): execute request asynchronously
                _return_http_data_only (bool): response data without head status
                    code and headers. Default is True.
                _preload_content (bool): if False, the urllib3.HTTPResponse object
                    will be returned without reading/decoding response data.
                    Default is True.
                _request_timeout (int/float/tuple): timeout setting for this request.
                    A single number is the total timeout; a (connection, read)
                    tuple sets both. Default is None.
                _check_input_type (bool): type-check data sent to the server.
                    Default is True.
                _check_return_type (bool): type-check data received from the
                    server. Default is True.
                _host_index (int/None): index of the server to use.
                    Default is read from the configuration.

            Returns:
                V1alpha1RepositoryCertificateList.
                If the method is called asynchronously, returns the request
                thread.
            """
            # Fill in defaults for the standard per-call options.
            kwargs['async_req'] = kwargs.get('async_req', False)
            kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
            kwargs['_preload_content'] = kwargs.get('_preload_content', True)
            kwargs['_request_timeout'] = kwargs.get('_request_timeout', None)
            kwargs['_check_input_type'] = kwargs.get('_check_input_type', True)
            kwargs['_check_return_type'] = kwargs.get('_check_return_type', True)
            kwargs['_host_index'] = kwargs.get('_host_index')
            return self.call_with_http_info(**kwargs)

        # DELETE /api/v1/certificates — delete matching certificates.
        self.certificate_service_delete_certificate = _Endpoint(
            settings={
                'response_type': (V1alpha1RepositoryCertificateList,),
                'auth': [],
                'endpoint_path': '/api/v1/certificates',
                'operation_id': 'certificate_service_delete_certificate',
                'http_method': 'DELETE',
                'servers': None,
            },
            params_map={
                'all': [
                    'host_name_pattern',
                    'cert_type',
                    'cert_sub_type',
                ],
                'required': [],
                'nullable': [],
                'enum': [],
                'validation': []
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'host_name_pattern': (str,),
                    'cert_type': (str,),
                    'cert_sub_type': (str,),
                },
                'attribute_map': {
                    'host_name_pattern': 'hostNamePattern',
                    'cert_type': 'certType',
                    'cert_sub_type': 'certSubType',
                },
                'location_map': {
                    'host_name_pattern': 'query',
                    'cert_type': 'query',
                    'cert_sub_type': 'query',
                },
                'collection_format_map': {}
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [],
            },
            api_client=api_client,
            callable=__certificate_service_delete_certificate
        )

        def __certificate_service_list_certificates(
            self,
            **kwargs
        ):
            """List all available repository certificates  # noqa: E501

            This method makes a synchronous HTTP request by default. To make an
            asynchronous HTTP request, please pass async_req=True

            >>> thread = api.certificate_service_list_certificates(async_req=True)
            >>> result = thread.get()

            Keyword Args:
                host_name_pattern (str): A file-glob pattern (not regular expression) the host name has to match.. [optional]
                cert_type (str): The type of the certificate to match (ssh or https).. [optional]
                cert_sub_type (str): The sub type of the certificate to match (protocol dependent, usually only used for ssh certs).. [optional]
                async_req (bool): execute request asynchronously
                _return_http_data_only (bool): response data without head status
                    code and headers. Default is True.
                _preload_content (bool): if False, the urllib3.HTTPResponse object
                    will be returned without reading/decoding response data.
                    Default is True.
                _request_timeout (int/float/tuple): timeout setting for this request.
                    A single number is the total timeout; a (connection, read)
                    tuple sets both. Default is None.
                _check_input_type (bool): type-check data sent to the server.
                    Default is True.
                _check_return_type (bool): type-check data received from the
                    server. Default is True.
                _host_index (int/None): index of the server to use.
                    Default is read from the configuration.

            Returns:
                V1alpha1RepositoryCertificateList.
                If the method is called asynchronously, returns the request
                thread.
            """
            # Fill in defaults for the standard per-call options.
            kwargs['async_req'] = kwargs.get('async_req', False)
            kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
            kwargs['_preload_content'] = kwargs.get('_preload_content', True)
            kwargs['_request_timeout'] = kwargs.get('_request_timeout', None)
            kwargs['_check_input_type'] = kwargs.get('_check_input_type', True)
            kwargs['_check_return_type'] = kwargs.get('_check_return_type', True)
            kwargs['_host_index'] = kwargs.get('_host_index')
            return self.call_with_http_info(**kwargs)

        # GET /api/v1/certificates — list matching certificates.
        self.certificate_service_list_certificates = _Endpoint(
            settings={
                'response_type': (V1alpha1RepositoryCertificateList,),
                'auth': [],
                'endpoint_path': '/api/v1/certificates',
                'operation_id': 'certificate_service_list_certificates',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                    'host_name_pattern',
                    'cert_type',
                    'cert_sub_type',
                ],
                'required': [],
                'nullable': [],
                'enum': [],
                'validation': []
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'host_name_pattern': (str,),
                    'cert_type': (str,),
                    'cert_sub_type': (str,),
                },
                'attribute_map': {
                    'host_name_pattern': 'hostNamePattern',
                    'cert_type': 'certType',
                    'cert_sub_type': 'certSubType',
                },
                'location_map': {
                    'host_name_pattern': 'query',
                    'cert_type': 'query',
                    'cert_sub_type': 'query',
                },
                'collection_format_map': {}
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [],
            },
            api_client=api_client,
            callable=__certificate_service_list_certificates
        )
| 38.35468
| 144
| 0.494349
| 1,356
| 15,572
| 5.412979
| 0.15413
| 0.025749
| 0.021253
| 0.022071
| 0.805177
| 0.791008
| 0.78733
| 0.781335
| 0.781335
| 0.781335
| 0
| 0.005266
| 0.426792
| 15,572
| 405
| 145
| 38.449383
| 0.817051
| 0.347483
| 0
| 0.616541
| 1
| 0
| 0.218844
| 0.034425
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015038
| false
| 0
| 0.022556
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b0fe6e8474b353183949aba6ca0152c287bd859f
| 104,254
|
py
|
Python
|
src/swift_configs.py
|
ska-telescope/ska-sdp-distributed-fourier-transform
|
4c6c42cc885166960119f80b7c69550c9cd840df
|
[
"BSD-3-Clause"
] | null | null | null |
src/swift_configs.py
|
ska-telescope/ska-sdp-distributed-fourier-transform
|
4c6c42cc885166960119f80b7c69550c9cd840df
|
[
"BSD-3-Clause"
] | null | null | null |
src/swift_configs.py
|
ska-telescope/ska-sdp-distributed-fourier-transform
|
4c6c42cc885166960119f80b7c69550c9cd840df
|
[
"BSD-3-Clause"
] | null | null | null |
# pylint: skip-file
"""
Configuration naming convention:
<image size>[<fov>]-n?<padded facet size>-<padded subgrid size>
Effectiveness percent gives communication overhead (100% would be
not transferring any redundant data). If "n" is given, it is a
new-style configuration with yN_size = yP_size. This results
generally in being able to cover the image with fewer facets, for
instance:
"12k[1]-8k-384": ..., # nfacet=4², eff 60.6%
"12k[1]-n8k-384": ..., # nfacet=2², eff 66.4%
"12k[1]-n4k-384": ..., # nfacet=4², eff 57.8%
I.e. whereas before we would need 4 8k facets to cover 12k, with
new-style configuration 2 is enough, so we can actually make facets
half as big, which is much cheaper to compute. Note however that
over-the-wire efficiency *decreases* a bit (not always the case, but
generally true because without image-space resampling we have less
freedom in parameter choice)
Configurations are sorted by "families" with a fixed N:yP_size ratio
while keeping subgrid size constant (which generally leads to an
equivalent configuration / same efficiency).
"""
SWIFT_CONFIGS = {
"128k[1]-n32k-512": dict(
W=11.0,
fov=1,
N=131072,
Nx=64,
yB_size=45056,
yN_size=65536,
yP_size=65536,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"112k[1]-n56k-512": dict(
W=11.0,
fov=1,
N=114688,
Nx=64,
yB_size=39424,
yN_size=57344,
yP_size=57344,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"96k[1]-n48k-512": dict(
W=11.0,
fov=1,
N=98304,
Nx=64,
yB_size=33792,
yN_size=49152,
yP_size=49152,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"72k[1]-n36k-512": dict(
W=11.0,
fov=1,
N=73728,
Nx=64,
yB_size=25344,
yN_size=36864,
yP_size=36864,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"64k[1]-n32k-512": dict(
W=11.0,
fov=1,
N=65536,
Nx=64,
yB_size=22528,
yN_size=32768,
yP_size=32768,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"32k[1]-n16k-512": dict(
W=11.0,
fov=1,
N=32768,
Nx=64,
yB_size=11264,
yN_size=16384,
yP_size=16384,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"16k[1]-n8k-512": dict(
W=11.0,
fov=1,
N=16384,
Nx=64,
yB_size=5632,
yN_size=8192,
yP_size=8192,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"14k[1]-n7k-512": dict(
W=11.0,
fov=1,
N=14336,
Nx=64,
yB_size=4928,
yN_size=7168,
yP_size=7168,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"12k[1]-n6k-512": dict(
W=11.0,
fov=1,
N=12288,
Nx=64,
yB_size=4224,
yN_size=6144,
yP_size=6144,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"10k[1]-n5k-512": dict(
W=11.0,
fov=1,
N=10240,
Nx=64,
yB_size=3520,
yN_size=5120,
yP_size=5120,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"8k[1]-n4k-512": dict(
W=11.0,
fov=1,
N=8192,
Nx=64,
yB_size=2816,
yN_size=4096,
yP_size=4096,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"7k[1]-n3584-512": dict(
W=11.0,
fov=1,
N=7168,
Nx=64,
yB_size=2464,
yN_size=3584,
yP_size=3584,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"6k[1]-n3k-512": dict(
W=11.0,
fov=1,
N=6144,
Nx=64,
yB_size=2112,
yN_size=3072,
yP_size=3072,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"5k[1]-n2560-512": dict(
W=11.0,
fov=1,
N=5120,
Nx=64,
yB_size=1760,
yN_size=2560,
yP_size=2560,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"4k[1]-n2k-512": dict(
W=11.0,
fov=1,
N=4096,
Nx=64,
yB_size=1408,
yN_size=2048,
yP_size=2048,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"3584[1]-n1792-512": dict(
W=11.0,
fov=1,
N=3584,
Nx=64,
yB_size=1232,
yN_size=1792,
yP_size=1792,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"3k[1]-n1536-512": dict(
W=11.0,
fov=1,
N=3072,
Nx=64,
yB_size=1056,
yN_size=1536,
yP_size=1536,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"2560[1]-n1280-512": dict(
W=11.0,
fov=1,
N=2560,
Nx=64,
yB_size=880,
yN_size=1280,
yP_size=1280,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"2k[1]-n1k-512": dict(
W=11.0,
fov=1,
N=2048,
Nx=64,
yB_size=704,
yN_size=1024,
yP_size=1024,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"1536[1]-n768-512": dict(
W=11.0,
fov=1,
N=1536,
Nx=64,
yB_size=528,
yN_size=768,
yP_size=768,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"1k[1]-n512-512": dict(
W=11.0,
fov=1,
N=1024,
Nx=64,
yB_size=352,
yN_size=512,
yP_size=512,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 58.3%
"14k[1]-n8k-448": dict(
W=16.5,
fov=1,
N=14336,
Nx=56,
yB_size=7168,
yN_size=8192,
yP_size=8192,
xA_size=392,
xM_size=448,
), # nfacet=2², eff 76.6%
"7k[1]-n4k-448": dict(
W=16.5,
fov=1,
N=7168,
Nx=56,
yB_size=3584,
yN_size=4096,
yP_size=4096,
xA_size=392,
xM_size=448,
), # nfacet=2², eff 76.6%
"3584[1]-n2k-448": dict(
W=16.5,
fov=1,
N=3584,
Nx=56,
yB_size=1792,
yN_size=2048,
yP_size=2048,
xA_size=392,
xM_size=448,
), # nfacet=2², eff 76.6%
"1792[1]-n1k-448": dict(
W=16.5,
fov=1,
N=1792,
Nx=56,
yB_size=896,
yN_size=1024,
yP_size=1024,
xA_size=392,
xM_size=448,
), # nfacet=2², eff 76.6%
"12k[1]-n8k-384": dict(
W=12.0,
fov=1,
N=12288,
Nx=4,
yB_size=6144,
yN_size=8192,
yP_size=8192,
xA_size=340,
xM_size=384,
), # nfacet=2², eff 66.4%
"6k[1]-n4k-384": dict(
W=12.0,
fov=1,
N=6144,
Nx=4,
yB_size=3072,
yN_size=4096,
yP_size=4096,
xA_size=340,
xM_size=384,
), # nfacet=2², eff 66.4%
"3k[1]-n2k-384": dict(
W=12.0,
fov=1,
N=3072,
Nx=4,
yB_size=1536,
yN_size=2048,
yP_size=2048,
xA_size=340,
xM_size=384,
), # nfacet=2², eff 66.4%
"1536[1]-n1k-384": dict(
W=12.0,
fov=1,
N=1536,
Nx=4,
yB_size=768,
yN_size=1024,
yP_size=1024,
xA_size=340,
xM_size=384,
), # nfacet=2², eff 66.4%
"10k[1]-n8k-320": dict(
W=10.25,
fov=1,
N=10240,
Nx=4,
yB_size=5120,
yN_size=8192,
yP_size=8192,
xA_size=292,
xM_size=320,
), # nfacet=2², eff 57.0%
"5k[1]-n4k-320": dict(
W=10.25,
fov=1,
N=5120,
Nx=4,
yB_size=2560,
yN_size=4096,
yP_size=4096,
xA_size=292,
xM_size=320,
), # nfacet=2², eff 57.0%
"2560[1]-n2k-320": dict(
W=10.25,
fov=1,
N=2560,
Nx=4,
yB_size=1280,
yN_size=2048,
yP_size=2048,
xA_size=292,
xM_size=320,
), # nfacet=2², eff 57.0%
"1280[1]-n1k-320": dict(
W=10.25,
fov=1,
N=1280,
Nx=4,
yB_size=640,
yN_size=1024,
yP_size=1024,
xA_size=292,
xM_size=320,
), # nfacet=2², eff 57.0%
"16k[1]-n16k-256": dict(
W=9.25,
fov=1,
N=16384,
Nx=4,
yB_size=8192,
yN_size=16384,
yP_size=16384,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"14k[1]-n14k-256": dict(
W=9.25,
fov=1,
N=14336,
Nx=4,
yB_size=7168,
yN_size=14336,
yP_size=14336,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"12k[1]-n12k-256": dict(
W=9.25,
fov=1,
N=12288,
Nx=4,
yB_size=6144,
yN_size=12288,
yP_size=12288,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"10k[1]-n10k-256": dict(
W=9.25,
fov=1,
N=10240,
Nx=4,
yB_size=5120,
yN_size=10240,
yP_size=10240,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"8k[1]-n8k-256": dict(
W=9.25,
fov=1,
N=8192,
Nx=4,
yB_size=4096,
yN_size=8192,
yP_size=8192,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"7k[1]-n7k-256": dict(
W=9.25,
fov=1,
N=7168,
Nx=4,
yB_size=3584,
yN_size=7168,
yP_size=7168,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"6k[1]-n6k-256": dict(
W=9.25,
fov=1,
N=6144,
Nx=4,
yB_size=3072,
yN_size=6144,
yP_size=6144,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"5k[1]-n5k-256": dict(
W=9.25,
fov=1,
N=5120,
Nx=4,
yB_size=2560,
yN_size=5120,
yP_size=5120,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"4k[1]-n4k-256": dict(
W=9.25,
fov=1,
N=4096,
Nx=4,
yB_size=2048,
yN_size=4096,
yP_size=4096,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"3584[1]-n3584-256": dict(
W=9.25,
fov=1,
N=3584,
Nx=4,
yB_size=1792,
yN_size=3584,
yP_size=3584,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"3k[1]-n3k-256": dict(
W=9.25,
fov=1,
N=3072,
Nx=4,
yB_size=1536,
yN_size=3072,
yP_size=3072,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"2560[1]-n2560-256": dict(
W=9.25,
fov=1,
N=2560,
Nx=4,
yB_size=1280,
yN_size=2560,
yP_size=2560,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"2k[1]-n2k-256": dict(
W=9.25,
fov=1,
N=2048,
Nx=4,
yB_size=1024,
yN_size=2048,
yP_size=2048,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"1792[1]-n1792-256": dict(
W=9.25,
fov=1,
N=1792,
Nx=4,
yB_size=896,
yN_size=1792,
yP_size=1792,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"1536[1]-n1536-256": dict(
W=9.25,
fov=1,
N=1536,
Nx=4,
yB_size=768,
yN_size=1536,
yP_size=1536,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"1280[1]-n1280-256": dict(
W=9.25,
fov=1,
N=1280,
Nx=4,
yB_size=640,
yN_size=1280,
yP_size=1280,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"1k[1]-n1k-256": dict(
W=9.25,
fov=1,
N=1024,
Nx=4,
yB_size=512,
yN_size=1024,
yP_size=1024,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 46.1%
"16k[1]-n7k-512": dict(
W=12.875,
fov=1,
N=16384,
Nx=64,
yB_size=5632,
yN_size=7168,
yP_size=7168,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"14k[1]-n6272-512": dict(
W=12.875,
fov=1,
N=14336,
Nx=64,
yB_size=4928,
yN_size=6272,
yP_size=6272,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"12k[1]-n5376-512": dict(
W=12.875,
fov=1,
N=12288,
Nx=64,
yB_size=4224,
yN_size=5376,
yP_size=5376,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"10k[1]-n4480-512": dict(
W=12.875,
fov=1,
N=10240,
Nx=64,
yB_size=3520,
yN_size=4480,
yP_size=4480,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"8k[1]-n3584-512": dict(
W=12.875,
fov=1,
N=8192,
Nx=64,
yB_size=2816,
yN_size=3584,
yP_size=3584,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"7k[1]-n3136-512": dict(
W=12.875,
fov=1,
N=7168,
Nx=64,
yB_size=2464,
yN_size=3136,
yP_size=3136,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"6k[1]-n2688-512": dict(
W=12.875,
fov=1,
N=6144,
Nx=64,
yB_size=2112,
yN_size=2688,
yP_size=2688,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"5k[1]-n2240-512": dict(
W=12.875,
fov=1,
N=5120,
Nx=64,
yB_size=1760,
yN_size=2240,
yP_size=2240,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"4k[1]-n1792-512": dict(
W=12.875,
fov=1,
N=4096,
Nx=64,
yB_size=1408,
yN_size=1792,
yP_size=1792,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"3584[1]-n1568-512": dict(
W=12.875,
fov=1,
N=3584,
Nx=64,
yB_size=1232,
yN_size=1568,
yP_size=1568,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"3k[1]-n1344-512": dict(
W=12.875,
fov=1,
N=3072,
Nx=64,
yB_size=1056,
yN_size=1344,
yP_size=1344,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"2560[1]-n1120-512": dict(
W=12.875,
fov=1,
N=2560,
Nx=64,
yB_size=880,
yN_size=1120,
yP_size=1120,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"2k[1]-n896-512": dict(
W=12.875,
fov=1,
N=2048,
Nx=64,
yB_size=704,
yN_size=896,
yP_size=896,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"1536[1]-n672-512": dict(
W=12.875,
fov=1,
N=1536,
Nx=64,
yB_size=528,
yN_size=672,
yP_size=672,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"1k[1]-n448-512": dict(
W=12.875,
fov=1,
N=1024,
Nx=64,
yB_size=352,
yN_size=448,
yP_size=448,
xA_size=448,
xM_size=512,
), # nfacet=3², eff 66.7%
"14k[1]-n7k-448": dict(
W=10.875,
fov=1,
N=14336,
Nx=56,
yB_size=4864,
yN_size=7168,
yP_size=7168,
xA_size=392,
xM_size=448,
), # nfacet=3², eff 58.3%
"7k[1]-n3584-448": dict(
W=10.875,
fov=1,
N=7168,
Nx=56,
yB_size=2432,
yN_size=3584,
yP_size=3584,
xA_size=392,
xM_size=448,
), # nfacet=3², eff 58.3%
"3584[1]-n1792-448": dict(
W=10.875,
fov=1,
N=3584,
Nx=56,
yB_size=1216,
yN_size=1792,
yP_size=1792,
xA_size=392,
xM_size=448,
), # nfacet=3², eff 58.3%
"1792[1]-n896-448": dict(
W=10.875,
fov=1,
N=1792,
Nx=56,
yB_size=608,
yN_size=896,
yP_size=896,
xA_size=392,
xM_size=448,
), # nfacet=3², eff 58.3%
"12k[1]-n7k-384": dict(
W=15.5,
fov=1,
N=12288,
Nx=48,
yB_size=6144,
yN_size=7168,
yP_size=7168,
xA_size=336,
xM_size=384,
), # nfacet=2², eff 75.0%
"6k[1]-n3584-384": dict(
W=15.5,
fov=1,
N=6144,
Nx=48,
yB_size=3072,
yN_size=3584,
yP_size=3584,
xA_size=336,
xM_size=384,
), # nfacet=2², eff 75.0%
"3k[1]-n1792-384": dict(
W=15.5,
fov=1,
N=3072,
Nx=48,
yB_size=1536,
yN_size=1792,
yP_size=1792,
xA_size=336,
xM_size=384,
), # nfacet=2², eff 75.0%
"1536[1]-n896-384": dict(
W=15.5,
fov=1,
N=1536,
Nx=48,
yB_size=768,
yN_size=896,
yP_size=896,
xA_size=336,
xM_size=384,
), # nfacet=2², eff 75.0%
"10k[1]-n7k-320": dict(
W=11.375,
fov=1,
N=10240,
Nx=4,
yB_size=5120,
yN_size=7168,
yP_size=7168,
xA_size=292,
xM_size=320,
), # nfacet=2², eff 65.2%
"5k[1]-n3584-320": dict(
W=11.375,
fov=1,
N=5120,
Nx=4,
yB_size=2560,
yN_size=3584,
yP_size=3584,
xA_size=292,
xM_size=320,
), # nfacet=2², eff 65.2%
"2560[1]-n1792-320": dict(
W=11.375,
fov=1,
N=2560,
Nx=4,
yB_size=1280,
yN_size=1792,
yP_size=1792,
xA_size=292,
xM_size=320,
), # nfacet=2², eff 65.2%
"1280[1]-n896-320": dict(
W=11.375,
fov=1,
N=1280,
Nx=4,
yB_size=640,
yN_size=896,
yP_size=896,
xA_size=292,
xM_size=320,
), # nfacet=2², eff 65.2%
"16k[1]-n14k-256": dict(
W=9.75,
fov=1,
N=16384,
Nx=4,
yB_size=8192,
yN_size=14336,
yP_size=14336,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"14k[1]-n12544-256": dict(
W=9.75,
fov=1,
N=14336,
Nx=4,
yB_size=7168,
yN_size=12544,
yP_size=12544,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"12k[1]-n10752-256": dict(
W=9.75,
fov=1,
N=12288,
Nx=4,
yB_size=6144,
yN_size=10752,
yP_size=10752,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"10k[1]-n8960-256": dict(
W=9.75,
fov=1,
N=10240,
Nx=4,
yB_size=5120,
yN_size=8960,
yP_size=8960,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"8k[1]-n7k-256": dict(
W=9.75,
fov=1,
N=8192,
Nx=4,
yB_size=4096,
yN_size=7168,
yP_size=7168,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"7k[1]-n6272-256": dict(
W=9.75,
fov=1,
N=7168,
Nx=4,
yB_size=3584,
yN_size=6272,
yP_size=6272,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"6k[1]-n5376-256": dict(
W=9.75,
fov=1,
N=6144,
Nx=4,
yB_size=3072,
yN_size=5376,
yP_size=5376,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"5k[1]-n4480-256": dict(
W=9.75,
fov=1,
N=5120,
Nx=4,
yB_size=2560,
yN_size=4480,
yP_size=4480,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"4k[1]-n3584-256": dict(
W=9.75,
fov=1,
N=4096,
Nx=4,
yB_size=2048,
yN_size=3584,
yP_size=3584,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"3584[1]-n3136-256": dict(
W=9.75,
fov=1,
N=3584,
Nx=4,
yB_size=1792,
yN_size=3136,
yP_size=3136,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"3k[1]-n2688-256": dict(
W=9.75,
fov=1,
N=3072,
Nx=4,
yB_size=1536,
yN_size=2688,
yP_size=2688,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"2560[1]-n2240-256": dict(
W=9.75,
fov=1,
N=2560,
Nx=4,
yB_size=1280,
yN_size=2240,
yP_size=2240,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"2k[1]-n1792-256": dict(
W=9.75,
fov=1,
N=2048,
Nx=4,
yB_size=1024,
yN_size=1792,
yP_size=1792,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"1792[1]-n1568-256": dict(
W=9.75,
fov=1,
N=1792,
Nx=4,
yB_size=896,
yN_size=1568,
yP_size=1568,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"1536[1]-n1344-256": dict(
W=9.75,
fov=1,
N=1536,
Nx=4,
yB_size=768,
yN_size=1344,
yP_size=1344,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"1280[1]-n1120-256": dict(
W=9.75,
fov=1,
N=1280,
Nx=4,
yB_size=640,
yN_size=1120,
yP_size=1120,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"1k[1]-n896-256": dict(
W=9.75,
fov=1,
N=1024,
Nx=4,
yB_size=512,
yN_size=896,
yP_size=896,
xA_size=236,
xM_size=256,
), # nfacet=2², eff 52.7%
"16k[1]-n6k-512": dict(
W=20.125,
fov=1,
N=16384,
Nx=64,
yB_size=5632,
yN_size=6144,
yP_size=6144,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"14k[1]-n5376-512": dict(
W=20.125,
fov=1,
N=14336,
Nx=64,
yB_size=4928,
yN_size=5376,
yP_size=5376,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"12k[1]-n4608-512": dict(
W=20.125,
fov=1,
N=12288,
Nx=64,
yB_size=4224,
yN_size=4608,
yP_size=4608,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"10k[1]-n3840-512": dict(
W=20.125,
fov=1,
N=10240,
Nx=64,
yB_size=3520,
yN_size=3840,
yP_size=3840,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"8k[1]-n3k-512": dict(
W=20.125,
fov=1,
N=8192,
Nx=64,
yB_size=2816,
yN_size=3072,
yP_size=3072,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"7k[1]-n2688-512": dict(
W=20.125,
fov=1,
N=7168,
Nx=64,
yB_size=2464,
yN_size=2688,
yP_size=2688,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"6k[1]-n2304-512": dict(
W=20.125,
fov=1,
N=6144,
Nx=64,
yB_size=2112,
yN_size=2304,
yP_size=2304,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"5k[1]-n1920-512": dict(
W=20.125,
fov=1,
N=5120,
Nx=64,
yB_size=1760,
yN_size=1920,
yP_size=1920,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"4k[1]-n1536-512": dict(
W=20.125,
fov=1,
N=4096,
Nx=64,
yB_size=1408,
yN_size=1536,
yP_size=1536,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"3584[1]-n1344-512": dict(
W=20.125,
fov=1,
N=3584,
Nx=64,
yB_size=1232,
yN_size=1344,
yP_size=1344,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"3k[1]-n1152-512": dict(
W=20.125,
fov=1,
N=3072,
Nx=64,
yB_size=1056,
yN_size=1152,
yP_size=1152,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"2560[1]-n960-512": dict(
W=20.125,
fov=1,
N=2560,
Nx=64,
yB_size=880,
yN_size=960,
yP_size=960,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"2k[1]-n768-512": dict(
W=20.125,
fov=1,
N=2048,
Nx=64,
yB_size=704,
yN_size=768,
yP_size=768,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"1536[1]-n576-512": dict(
W=20.125,
fov=1,
N=1536,
Nx=64,
yB_size=528,
yN_size=576,
yP_size=576,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"1k[1]-n384-512": dict(
W=20.125,
fov=1,
N=1024,
Nx=64,
yB_size=352,
yN_size=384,
yP_size=384,
xA_size=320,
xM_size=512,
), # nfacet=3², eff 55.6%
"14k[1]-n6k-448": dict(
W=13.0,
fov=1,
N=14336,
Nx=56,
yB_size=4864,
yN_size=6144,
yP_size=6144,
xA_size=392,
xM_size=448,
), # nfacet=3², eff 68.1%
"7k[1]-n3k-448": dict(
W=13.0,
fov=1,
N=7168,
Nx=56,
yB_size=2432,
yN_size=3072,
yP_size=3072,
xA_size=392,
xM_size=448,
), # nfacet=3², eff 68.1%
"3584[1]-n1536-448": dict(
W=13.0,
fov=1,
N=3584,
Nx=56,
yB_size=1216,
yN_size=1536,
yP_size=1536,
xA_size=392,
xM_size=448,
), # nfacet=3², eff 68.1%
"1792[1]-n768-448": dict(
W=13.0,
fov=1,
N=1792,
Nx=56,
yB_size=608,
yN_size=768,
yP_size=768,
xA_size=392,
xM_size=448,
), # nfacet=3², eff 68.1%
"12k[1]-n6k-384": dict(
W=10.75,
fov=1,
N=12288,
Nx=3,
yB_size=4096,
yN_size=6144,
yP_size=6144,
xA_size=345,
xM_size=384,
), # nfacet=3², eff 59.9%
"6k[1]-n3k-384": dict(
W=10.75,
fov=1,
N=6144,
Nx=3,
yB_size=2048,
yN_size=3072,
yP_size=3072,
xA_size=345,
xM_size=384,
), # nfacet=3², eff 59.9%
"3k[1]-n1536-384": dict(
W=10.75,
fov=1,
N=3072,
Nx=3,
yB_size=1024,
yN_size=1536,
yP_size=1536,
xA_size=345,
xM_size=384,
), # nfacet=3², eff 59.9%
"1536[1]-n768-384": dict(
W=10.75,
fov=1,
N=1536,
Nx=3,
yB_size=512,
yN_size=768,
yP_size=768,
xA_size=345,
xM_size=384,
), # nfacet=3², eff 59.9%
"10k[1]-n6k-320": dict(
W=14.375,
fov=1,
N=10240,
Nx=4,
yB_size=5120,
yN_size=6144,
yP_size=6144,
xA_size=276,
xM_size=320,
), # nfacet=2², eff 71.9%
"5k[1]-n3k-320": dict(
W=14.375,
fov=1,
N=5120,
Nx=4,
yB_size=2560,
yN_size=3072,
yP_size=3072,
xA_size=276,
xM_size=320,
), # nfacet=2², eff 71.9%
"2560[1]-n1536-320": dict(
W=14.375,
fov=1,
N=2560,
Nx=4,
yB_size=1280,
yN_size=1536,
yP_size=1536,
xA_size=276,
xM_size=320,
), # nfacet=2², eff 71.9%
"1280[1]-n768-320": dict(
W=14.375,
fov=1,
N=1280,
Nx=4,
yB_size=640,
yN_size=768,
yP_size=768,
xA_size=276,
xM_size=320,
), # nfacet=2², eff 71.9%
"16k[1]-n12k-256": dict(
W=10.75,
fov=1,
N=16384,
Nx=32,
yB_size=8192,
yN_size=12288,
yP_size=12288,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"14k[1]-n10752-256": dict(
W=10.75,
fov=1,
N=14336,
Nx=32,
yB_size=7168,
yN_size=10752,
yP_size=10752,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"12k[1]-n9k-256": dict(
W=10.75,
fov=1,
N=12288,
Nx=32,
yB_size=6144,
yN_size=9216,
yP_size=9216,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"10k[1]-n7680-256": dict(
W=10.75,
fov=1,
N=10240,
Nx=32,
yB_size=5120,
yN_size=7680,
yP_size=7680,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"8k[1]-n6k-256": dict(
W=10.75,
fov=1,
N=8192,
Nx=32,
yB_size=4096,
yN_size=6144,
yP_size=6144,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"7k[1]-n5376-256": dict(
W=10.75,
fov=1,
N=7168,
Nx=32,
yB_size=3584,
yN_size=5376,
yP_size=5376,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"6k[1]-n4608-256": dict(
W=10.75,
fov=1,
N=6144,
Nx=32,
yB_size=3072,
yN_size=4608,
yP_size=4608,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"5k[1]-n3840-256": dict(
W=10.75,
fov=1,
N=5120,
Nx=32,
yB_size=2560,
yN_size=3840,
yP_size=3840,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"4k[1]-n3k-256": dict(
W=10.75,
fov=1,
N=4096,
Nx=32,
yB_size=2048,
yN_size=3072,
yP_size=3072,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"3584[1]-n2688-256": dict(
W=10.75,
fov=1,
N=3584,
Nx=32,
yB_size=1792,
yN_size=2688,
yP_size=2688,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"3k[1]-n2304-256": dict(
W=10.75,
fov=1,
N=3072,
Nx=32,
yB_size=1536,
yN_size=2304,
yP_size=2304,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"2560[1]-n1920-256": dict(
W=10.75,
fov=1,
N=2560,
Nx=32,
yB_size=1280,
yN_size=1920,
yP_size=1920,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"2k[1]-n1536-256": dict(
W=10.75,
fov=1,
N=2048,
Nx=32,
yB_size=1024,
yN_size=1536,
yP_size=1536,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"1792[1]-n1344-256": dict(
W=10.75,
fov=1,
N=1792,
Nx=32,
yB_size=896,
yN_size=1344,
yP_size=1344,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"1536[1]-n1152-256": dict(
W=10.75,
fov=1,
N=1536,
Nx=32,
yB_size=768,
yN_size=1152,
yP_size=1152,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"1280[1]-n960-256": dict(
W=10.75,
fov=1,
N=1280,
Nx=32,
yB_size=640,
yN_size=960,
yP_size=960,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"1k[1]-n768-256": dict(
W=10.75,
fov=1,
N=1024,
Nx=32,
yB_size=512,
yN_size=768,
yP_size=768,
xA_size=224,
xM_size=256,
), # nfacet=2², eff 58.3%
"16k[1]-n5k-512": dict(
W=13.25,
fov=1,
N=16384,
Nx=8,
yB_size=4096,
yN_size=5120,
yP_size=5120,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"14k[1]-n4480-512": dict(
W=13.25,
fov=1,
N=14336,
Nx=8,
yB_size=3584,
yN_size=4480,
yP_size=4480,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"12k[1]-n3840-512": dict(
W=13.25,
fov=1,
N=12288,
Nx=8,
yB_size=3072,
yN_size=3840,
yP_size=3840,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"10k[1]-n3200-512": dict(
W=13.25,
fov=1,
N=10240,
Nx=8,
yB_size=2560,
yN_size=3200,
yP_size=3200,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"8k[1]-n2560-512": dict(
W=13.25,
fov=1,
N=8192,
Nx=8,
yB_size=2048,
yN_size=2560,
yP_size=2560,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"7k[1]-n2240-512": dict(
W=13.25,
fov=1,
N=7168,
Nx=8,
yB_size=1792,
yN_size=2240,
yP_size=2240,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"6k[1]-n1920-512": dict(
W=13.25,
fov=1,
N=6144,
Nx=8,
yB_size=1536,
yN_size=1920,
yP_size=1920,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"5k[1]-n1600-512": dict(
W=13.25,
fov=1,
N=5120,
Nx=8,
yB_size=1280,
yN_size=1600,
yP_size=1600,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"4k[1]-n1280-512": dict(
W=13.25,
fov=1,
N=4096,
Nx=8,
yB_size=1024,
yN_size=1280,
yP_size=1280,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"3584[1]-n1120-512": dict(
W=13.25,
fov=1,
N=3584,
Nx=8,
yB_size=896,
yN_size=1120,
yP_size=1120,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"3k[1]-n960-512": dict(
W=13.25,
fov=1,
N=3072,
Nx=8,
yB_size=768,
yN_size=960,
yP_size=960,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"2560[1]-n800-512": dict(
W=13.25,
fov=1,
N=2560,
Nx=8,
yB_size=640,
yN_size=800,
yP_size=800,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"2k[1]-n640-512": dict(
W=13.25,
fov=1,
N=2048,
Nx=8,
yB_size=512,
yN_size=640,
yP_size=640,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"1536[1]-n480-512": dict(
W=13.25,
fov=1,
N=1536,
Nx=8,
yB_size=384,
yN_size=480,
yP_size=480,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"1k[1]-n320-512": dict(
W=13.25,
fov=1,
N=1024,
Nx=8,
yB_size=256,
yN_size=320,
yP_size=320,
xA_size=440,
xM_size=512,
), # nfacet=4², eff 68.8%
"14k[1]-n5k-448": dict(
W=11.125,
fov=1,
N=14336,
Nx=56,
yB_size=3584,
yN_size=5120,
yP_size=5120,
xA_size=392,
xM_size=448,
), # nfacet=4², eff 61.2%
"7k[1]-n2560-448": dict(
W=11.125,
fov=1,
N=7168,
Nx=56,
yB_size=1792,
yN_size=2560,
yP_size=2560,
xA_size=392,
xM_size=448,
), # nfacet=4², eff 61.2%
"3584[1]-n1280-448": dict(
W=11.125,
fov=1,
N=3584,
Nx=56,
yB_size=896,
yN_size=1280,
yP_size=1280,
xA_size=392,
xM_size=448,
), # nfacet=4², eff 61.2%
"1792[1]-n640-448": dict(
W=11.125,
fov=1,
N=1792,
Nx=56,
yB_size=448,
yN_size=640,
yP_size=640,
xA_size=392,
xM_size=448,
), # nfacet=4², eff 61.2%
"12k[1]-n5k-384": dict(
W=13.25,
fov=1,
N=12288,
Nx=6,
yB_size=4096,
yN_size=5120,
yP_size=5120,
xA_size=330,
xM_size=384,
), # nfacet=3², eff 68.8%
"6k[1]-n2560-384": dict(
W=13.25,
fov=1,
N=6144,
Nx=6,
yB_size=2048,
yN_size=2560,
yP_size=2560,
xA_size=330,
xM_size=384,
), # nfacet=3², eff 68.8%
"3k[1]-n1280-384": dict(
W=13.25,
fov=1,
N=3072,
Nx=6,
yB_size=1024,
yN_size=1280,
yP_size=1280,
xA_size=330,
xM_size=384,
), # nfacet=3², eff 68.8%
"1536[1]-n640-384": dict(
W=13.25,
fov=1,
N=1536,
Nx=6,
yB_size=512,
yN_size=640,
yP_size=640,
xA_size=330,
xM_size=384,
), # nfacet=3², eff 68.8%
"10k[1]-n5k-320": dict(
W=11.125,
fov=1,
N=10240,
Nx=40,
yB_size=3584,
yN_size=5120,
yP_size=5120,
xA_size=280,
xM_size=320,
), # nfacet=3², eff 58.3%
"5k[1]-n2560-320": dict(
W=11.125,
fov=1,
N=5120,
Nx=40,
yB_size=1792,
yN_size=2560,
yP_size=2560,
xA_size=280,
xM_size=320,
), # nfacet=3², eff 58.3%
"2560[1]-n1280-320": dict(
W=11.125,
fov=1,
N=2560,
Nx=40,
yB_size=896,
yN_size=1280,
yP_size=1280,
xA_size=280,
xM_size=320,
), # nfacet=3², eff 58.3%
"1280[1]-n640-320": dict(
W=11.125,
fov=1,
N=1280,
Nx=40,
yB_size=448,
yN_size=640,
yP_size=640,
xA_size=280,
xM_size=320,
), # nfacet=3², eff 58.3%
"16k[1]-n10k-256": dict(
W=13.25,
fov=1,
N=16384,
Nx=4,
yB_size=8192,
yN_size=10240,
yP_size=10240,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"14k[1]-n8960-256": dict(
W=13.25,
fov=1,
N=14336,
Nx=4,
yB_size=7168,
yN_size=8960,
yP_size=8960,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"12k[1]-n7680-256": dict(
W=13.25,
fov=1,
N=12288,
Nx=4,
yB_size=6144,
yN_size=7680,
yP_size=7680,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"10k[1]-n6400-256": dict(
W=13.25,
fov=1,
N=10240,
Nx=4,
yB_size=5120,
yN_size=6400,
yP_size=6400,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"8k[1]-n5k-256": dict(
W=13.25,
fov=1,
N=8192,
Nx=4,
yB_size=4096,
yN_size=5120,
yP_size=5120,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"7k[1]-n4480-256": dict(
W=13.25,
fov=1,
N=7168,
Nx=4,
yB_size=3584,
yN_size=4480,
yP_size=4480,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"6k[1]-n3840-256": dict(
W=13.25,
fov=1,
N=6144,
Nx=4,
yB_size=3072,
yN_size=3840,
yP_size=3840,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"5k[1]-n3200-256": dict(
W=13.25,
fov=1,
N=5120,
Nx=4,
yB_size=2560,
yN_size=3200,
yP_size=3200,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"4k[1]-n2560-256": dict(
W=13.25,
fov=1,
N=4096,
Nx=4,
yB_size=2048,
yN_size=2560,
yP_size=2560,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"3584[1]-n2240-256": dict(
W=13.25,
fov=1,
N=3584,
Nx=4,
yB_size=1792,
yN_size=2240,
yP_size=2240,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"3k[1]-n1920-256": dict(
W=13.25,
fov=1,
N=3072,
Nx=4,
yB_size=1536,
yN_size=1920,
yP_size=1920,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"2560[1]-n1600-256": dict(
W=13.25,
fov=1,
N=2560,
Nx=4,
yB_size=1280,
yN_size=1600,
yP_size=1600,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"2k[1]-n1280-256": dict(
W=13.25,
fov=1,
N=2048,
Nx=4,
yB_size=1024,
yN_size=1280,
yP_size=1280,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"1792[1]-n1120-256": dict(
W=13.25,
fov=1,
N=1792,
Nx=4,
yB_size=896,
yN_size=1120,
yP_size=1120,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"1536[1]-n960-256": dict(
W=13.25,
fov=1,
N=1536,
Nx=4,
yB_size=768,
yN_size=960,
yP_size=960,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"1280[1]-n800-256": dict(
W=13.25,
fov=1,
N=1280,
Nx=4,
yB_size=640,
yN_size=800,
yP_size=800,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"1k[1]-n640-256": dict(
W=13.25,
fov=1,
N=1024,
Nx=4,
yB_size=512,
yN_size=640,
yP_size=640,
xA_size=220,
xM_size=256,
), # nfacet=2², eff 68.8%
"96k[1]-n24k-512": dict(
W=13.625,
fov=1,
N=98304,
Nx=64,
yB_size=19968,
yN_size=32768,
yP_size=32768,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"64k[1]-n16k-512": dict(
W=13.625,
fov=1,
N=65536,
Nx=64,
yB_size=13312,
yN_size=16384,
yP_size=16384,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"32k[1]-n8k-512": dict(
W=13.625,
fov=1,
N=32768,
Nx=64,
yB_size=6656,
yN_size=8192,
yP_size=8192,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"16k[1]-n4k-512": dict(
W=13.625,
fov=1,
N=16384,
Nx=64,
yB_size=3328,
yN_size=4096,
yP_size=4096,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"14k[1]-n3584-512": dict(
W=13.625,
fov=1,
N=14336,
Nx=64,
yB_size=2912,
yN_size=3584,
yP_size=3584,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"12k[1]-n3k-512": dict(
W=13.625,
fov=1,
N=12288,
Nx=64,
yB_size=2496,
yN_size=3072,
yP_size=3072,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"10k[1]-n2560-512": dict(
W=13.625,
fov=1,
N=10240,
Nx=64,
yB_size=2080,
yN_size=2560,
yP_size=2560,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"8k[1]-n2k-512": dict(
W=13.625,
fov=1,
N=8192,
Nx=64,
yB_size=1664,
yN_size=2048,
yP_size=2048,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"7k[1]-n1792-512": dict(
W=13.625,
fov=1,
N=7168,
Nx=64,
yB_size=1456,
yN_size=1792,
yP_size=1792,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"6k[1]-n1536-512": dict(
W=13.625,
fov=1,
N=6144,
Nx=64,
yB_size=1248,
yN_size=1536,
yP_size=1536,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"5k[1]-n1280-512": dict(
W=13.625,
fov=1,
N=5120,
Nx=64,
yB_size=1040,
yN_size=1280,
yP_size=1280,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"4k[1]-n1k-512": dict(
W=13.625,
fov=1,
N=4096,
Nx=64,
yB_size=832,
yN_size=1024,
yP_size=1024,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"3584[1]-n896-512": dict(
W=13.625,
fov=1,
N=3584,
Nx=64,
yB_size=728,
yN_size=896,
yP_size=896,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"3k[1]-n768-512": dict(
W=13.625,
fov=1,
N=3072,
Nx=64,
yB_size=624,
yN_size=768,
yP_size=768,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"2560[1]-n640-512": dict(
W=13.625,
fov=1,
N=2560,
Nx=64,
yB_size=520,
yN_size=640,
yP_size=640,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"2k[1]-n512-512": dict(
W=13.625,
fov=1,
N=2048,
Nx=64,
yB_size=416,
yN_size=512,
yP_size=512,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"1536[1]-n384-512": dict(
W=13.625,
fov=1,
N=1536,
Nx=64,
yB_size=312,
yN_size=384,
yP_size=384,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"1k[1]-n256-512": dict(
W=13.625,
fov=1,
N=1024,
Nx=64,
yB_size=208,
yN_size=256,
yP_size=256,
xA_size=320,
xM_size=512,
), # nfacet=5², eff 50.0%
"14k[1]-n4k-448": dict(
W=16.5,
fov=1,
N=14336,
Nx=8,
yB_size=3584,
yN_size=4096,
yP_size=4096,
xA_size=344,
xM_size=448,
), # nfacet=4², eff 67.2%
"7k[1]-n2k-448": dict(
W=16.5,
fov=1,
N=7168,
Nx=8,
yB_size=1792,
yN_size=2048,
yP_size=2048,
xA_size=344,
xM_size=448,
), # nfacet=4², eff 67.2%
"3584[1]-n1k-448": dict(
W=16.5,
fov=1,
N=3584,
Nx=8,
yB_size=896,
yN_size=1024,
yP_size=1024,
xA_size=344,
xM_size=448,
), # nfacet=4², eff 67.2%
"1792[1]-n512-448": dict(
W=16.5,
fov=1,
N=1792,
Nx=8,
yB_size=448,
yN_size=512,
yP_size=512,
xA_size=344,
xM_size=448,
), # nfacet=4², eff 67.2%
"12k[1]-n4k-384": dict(
W=12.0,
fov=1,
N=12288,
Nx=8,
yB_size=3072,
yN_size=4096,
yP_size=4096,
xA_size=296,
xM_size=384,
), # nfacet=4², eff 57.8%
"6k[1]-n2k-384": dict(
W=12.0,
fov=1,
N=6144,
Nx=8,
yB_size=1536,
yN_size=2048,
yP_size=2048,
xA_size=296,
xM_size=384,
), # nfacet=4², eff 57.8%
"3k[1]-n1k-384": dict(
W=12.0,
fov=1,
N=3072,
Nx=8,
yB_size=768,
yN_size=1024,
yP_size=1024,
xA_size=296,
xM_size=384,
), # nfacet=4², eff 57.8%
"1536[1]-n512-384": dict(
W=12.0,
fov=1,
N=1536,
Nx=8,
yB_size=384,
yN_size=512,
yP_size=512,
xA_size=296,
xM_size=384,
), # nfacet=4², eff 57.8%
"10k[1]-n4k-320": dict(
W=15.625,
fov=1,
N=10240,
Nx=64,
yB_size=3520,
yN_size=4096,
yP_size=4096,
xA_size=256,
xM_size=320,
), # nfacet=3², eff 66.7%
"5k[1]-n2k-320": dict(
W=15.625,
fov=1,
N=5120,
Nx=64,
yB_size=1760,
yN_size=2048,
yP_size=2048,
xA_size=256,
xM_size=320,
), # nfacet=3², eff 66.7%
"2560[1]-n1k-320": dict(
W=15.625,
fov=1,
N=2560,
Nx=64,
yB_size=880,
yN_size=1024,
yP_size=1024,
xA_size=256,
xM_size=320,
), # nfacet=3², eff 66.7%
"1280[1]-n512-320": dict(
W=15.625,
fov=1,
N=1280,
Nx=64,
yB_size=440,
yN_size=512,
yP_size=512,
xA_size=256,
xM_size=320,
), # nfacet=3², eff 66.7%
"16k[1]-n8k-256": dict(
W=11.0,
fov=1,
N=16384,
Nx=32,
yB_size=5632,
yN_size=8192,
yP_size=8192,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"14k[1]-n7k-256": dict(
W=11.0,
fov=1,
N=14336,
Nx=32,
yB_size=4928,
yN_size=7168,
yP_size=7168,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"12k[1]-n6k-256": dict(
W=11.0,
fov=1,
N=12288,
Nx=32,
yB_size=4224,
yN_size=6144,
yP_size=6144,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"10k[1]-n5k-256": dict(
W=11.0,
fov=1,
N=10240,
Nx=32,
yB_size=3520,
yN_size=5120,
yP_size=5120,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"8k[1]-n4k-256": dict(
W=11.0,
fov=1,
N=8192,
Nx=32,
yB_size=2816,
yN_size=4096,
yP_size=4096,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"7k[1]-n3584-256": dict(
W=11.0,
fov=1,
N=7168,
Nx=32,
yB_size=2464,
yN_size=3584,
yP_size=3584,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"6k[1]-n3k-256": dict(
W=11.0,
fov=1,
N=6144,
Nx=32,
yB_size=2112,
yN_size=3072,
yP_size=3072,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"5k[1]-n2560-256": dict(
W=11.0,
fov=1,
N=5120,
Nx=32,
yB_size=1760,
yN_size=2560,
yP_size=2560,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"4k[1]-n2k-256": dict(
W=11.0,
fov=1,
N=4096,
Nx=32,
yB_size=1408,
yN_size=2048,
yP_size=2048,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"3584[1]-n1792-256": dict(
W=11.0,
fov=1,
N=3584,
Nx=32,
yB_size=1232,
yN_size=1792,
yP_size=1792,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"3k[1]-n1536-256": dict(
W=11.0,
fov=1,
N=3072,
Nx=32,
yB_size=1056,
yN_size=1536,
yP_size=1536,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"2560[1]-n1280-256": dict(
W=11.0,
fov=1,
N=2560,
Nx=32,
yB_size=880,
yN_size=1280,
yP_size=1280,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"2k[1]-n1k-256": dict(
W=11.0,
fov=1,
N=2048,
Nx=32,
yB_size=704,
yN_size=1024,
yP_size=1024,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"1792[1]-n896-256": dict(
W=11.0,
fov=1,
N=1792,
Nx=32,
yB_size=616,
yN_size=896,
yP_size=896,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"1536[1]-n768-256": dict(
W=11.0,
fov=1,
N=1536,
Nx=32,
yB_size=528,
yN_size=768,
yP_size=768,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"1280[1]-n640-256": dict(
W=11.0,
fov=1,
N=1280,
Nx=32,
yB_size=440,
yN_size=640,
yP_size=640,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"1k[1]-n512-256": dict(
W=11.0,
fov=1,
N=1024,
Nx=32,
yB_size=352,
yN_size=512,
yP_size=512,
xA_size=160,
xM_size=256,
), # nfacet=3², eff 41.7%
"16k[1]-8k-512": dict(
W=17.0,
fov=1,
N=16384,
Nx=32,
yB_size=4096,
yN_size=4640,
yP_size=8192,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"14k[1]-7k-512": dict(
W=17.0,
fov=1,
N=14336,
Nx=32,
yB_size=3584,
yN_size=4060,
yP_size=7168,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"12k[1]-6k-512": dict(
W=17.0,
fov=1,
N=12288,
Nx=32,
yB_size=3072,
yN_size=3480,
yP_size=6144,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"10k[1]-5k-512": dict(
W=17.0,
fov=1,
N=10240,
Nx=32,
yB_size=2560,
yN_size=2900,
yP_size=5120,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"8k[1]-4k-512": dict(
W=17.0,
fov=1,
N=8192,
Nx=32,
yB_size=2048,
yN_size=2320,
yP_size=4096,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"7k[1]-3584-512": dict(
W=17.0,
fov=1,
N=7168,
Nx=32,
yB_size=1792,
yN_size=2030,
yP_size=3584,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"6k[1]-3k-512": dict(
W=17.0,
fov=1,
N=6144,
Nx=32,
yB_size=1536,
yN_size=1740,
yP_size=3072,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"5k[1]-2560-512": dict(
W=17.0,
fov=1,
N=5120,
Nx=32,
yB_size=1280,
yN_size=1450,
yP_size=2560,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"4k[1]-2k-512": dict(
W=17.0,
fov=1,
N=4096,
Nx=32,
yB_size=1024,
yN_size=1160,
yP_size=2048,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"3584[1]-1792-512": dict(
W=17.0,
fov=1,
N=3584,
Nx=32,
yB_size=896,
yN_size=1015,
yP_size=1792,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"3k[1]-1536-512": dict(
W=17.0,
fov=1,
N=3072,
Nx=32,
yB_size=768,
yN_size=870,
yP_size=1536,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"2560[1]-1280-512": dict(
W=17.0,
fov=1,
N=2560,
Nx=32,
yB_size=640,
yN_size=725,
yP_size=1280,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"2k[1]-1k-512": dict(
W=17.0,
fov=1,
N=2048,
Nx=32,
yB_size=512,
yN_size=580,
yP_size=1024,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"1536[1]-768-512": dict(
W=17.0,
fov=1,
N=1536,
Nx=32,
yB_size=384,
yN_size=435,
yP_size=768,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"1k[1]-512-512": dict(
W=17.0,
fov=1,
N=1024,
Nx=32,
yB_size=256,
yN_size=290,
yP_size=512,
xA_size=352,
xM_size=512,
), # nfacet=4², eff 60.7%
"14k[1]-8k-448": dict(
W=16.5,
fov=1,
N=14336,
Nx=64,
yB_size=3584,
yN_size=4096,
yP_size=8192,
xA_size=320,
xM_size=448,
), # nfacet=4², eff 62.5%
"7k[1]-4k-448": dict(
W=16.5,
fov=1,
N=7168,
Nx=64,
yB_size=1792,
yN_size=2048,
yP_size=4096,
xA_size=320,
xM_size=448,
), # nfacet=4², eff 62.5%
"3584[1]-2k-448": dict(
W=16.5,
fov=1,
N=3584,
Nx=64,
yB_size=896,
yN_size=1024,
yP_size=2048,
xA_size=320,
xM_size=448,
), # nfacet=4², eff 62.5%
"1792[1]-1k-448": dict(
W=16.5,
fov=1,
N=1792,
Nx=64,
yB_size=448,
yN_size=512,
yP_size=1024,
xA_size=320,
xM_size=448,
), # nfacet=4², eff 62.5%
"12k[1]-8k-384": dict(
W=16.875,
fov=1,
N=12288,
Nx=24,
yB_size=3072,
yN_size=3488,
yP_size=8192,
xA_size=264,
xM_size=384,
), # nfacet=4², eff 60.6%
"6k[1]-4k-384": dict(
W=16.875,
fov=1,
N=6144,
Nx=24,
yB_size=1536,
yN_size=1744,
yP_size=4096,
xA_size=264,
xM_size=384,
), # nfacet=4², eff 60.6%
"3k[1]-2k-384": dict(
W=16.875,
fov=1,
N=3072,
Nx=24,
yB_size=768,
yN_size=872,
yP_size=2048,
xA_size=264,
xM_size=384,
), # nfacet=4², eff 60.6%
"1536[1]-1k-384": dict(
W=16.875,
fov=1,
N=1536,
Nx=24,
yB_size=384,
yN_size=436,
yP_size=1024,
xA_size=264,
xM_size=384,
), # nfacet=4², eff 60.6%
"10k[1]-8k-320": dict(
W=16.125,
fov=1,
N=10240,
Nx=8,
yB_size=2560,
yN_size=2944,
yP_size=8192,
xA_size=216,
xM_size=320,
), # nfacet=4², eff 58.7%
"5k[1]-4k-320": dict(
W=16.125,
fov=1,
N=5120,
Nx=8,
yB_size=1280,
yN_size=1472,
yP_size=4096,
xA_size=216,
xM_size=320,
), # nfacet=4², eff 58.7%
"2560[1]-2k-320": dict(
W=16.125,
fov=1,
N=2560,
Nx=8,
yB_size=640,
yN_size=736,
yP_size=2048,
xA_size=216,
xM_size=320,
), # nfacet=4², eff 58.7%
"1280[1]-1k-320": dict(
W=16.125,
fov=1,
N=1280,
Nx=8,
yB_size=320,
yN_size=368,
yP_size=1024,
xA_size=216,
xM_size=320,
), # nfacet=4², eff 58.7%
"16k[1]-16k-256": dict(
W=17.0,
fov=1,
N=16384,
Nx=16,
yB_size=8192,
yN_size=9280,
yP_size=16384,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"14k[1]-14k-256": dict(
W=17.0,
fov=1,
N=14336,
Nx=16,
yB_size=7168,
yN_size=8120,
yP_size=14336,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"12k[1]-12k-256": dict(
W=17.0,
fov=1,
N=12288,
Nx=16,
yB_size=6144,
yN_size=6960,
yP_size=12288,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"10k[1]-10k-256": dict(
W=17.0,
fov=1,
N=10240,
Nx=16,
yB_size=5120,
yN_size=5800,
yP_size=10240,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"8k[1]-8k-256": dict(
W=17.0,
fov=1,
N=8192,
Nx=16,
yB_size=4096,
yN_size=4640,
yP_size=8192,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"7k[1]-7k-256": dict(
W=17.0,
fov=1,
N=7168,
Nx=16,
yB_size=3584,
yN_size=4060,
yP_size=7168,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"6k[1]-6k-256": dict(
W=17.0,
fov=1,
N=6144,
Nx=16,
yB_size=3072,
yN_size=3480,
yP_size=6144,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"5k[1]-5k-256": dict(
W=17.0,
fov=1,
N=5120,
Nx=16,
yB_size=2560,
yN_size=2900,
yP_size=5120,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"4k[1]-4k-256": dict(
W=17.0,
fov=1,
N=4096,
Nx=16,
yB_size=2048,
yN_size=2320,
yP_size=4096,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"3584[1]-3584-256": dict(
W=17.0,
fov=1,
N=3584,
Nx=16,
yB_size=1792,
yN_size=2030,
yP_size=3584,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"3k[1]-3k-256": dict(
W=17.0,
fov=1,
N=3072,
Nx=16,
yB_size=1536,
yN_size=1740,
yP_size=3072,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"2560[1]-2560-256": dict(
W=17.0,
fov=1,
N=2560,
Nx=16,
yB_size=1280,
yN_size=1450,
yP_size=2560,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"2k[1]-2k-256": dict(
W=17.0,
fov=1,
N=2048,
Nx=16,
yB_size=1024,
yN_size=1160,
yP_size=2048,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"1792[1]-1792-256": dict(
W=17.0,
fov=1,
N=1792,
Nx=16,
yB_size=896,
yN_size=1015,
yP_size=1792,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"1536[1]-1536-256": dict(
W=17.0,
fov=1,
N=1536,
Nx=16,
yB_size=768,
yN_size=870,
yP_size=1536,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"1280[1]-1280-256": dict(
W=17.0,
fov=1,
N=1280,
Nx=16,
yB_size=640,
yN_size=725,
yP_size=1280,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"1k[1]-1k-256": dict(
W=17.0,
fov=1,
N=1024,
Nx=16,
yB_size=512,
yN_size=580,
yP_size=1024,
xA_size=176,
xM_size=256,
), # nfacet=2², eff 60.7%
"16k[1]-7k-512": dict(
W=15.875,
fov=1,
N=16384,
Nx=32,
yB_size=4096,
yN_size=4736,
yP_size=7168,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"14k[1]-6272-512": dict(
W=15.875,
fov=1,
N=14336,
Nx=32,
yB_size=3584,
yN_size=4144,
yP_size=6272,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"12k[1]-5376-512": dict(
W=15.875,
fov=1,
N=12288,
Nx=32,
yB_size=3072,
yN_size=3552,
yP_size=5376,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"10k[1]-4480-512": dict(
W=15.875,
fov=1,
N=10240,
Nx=32,
yB_size=2560,
yN_size=2960,
yP_size=4480,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"8k[1]-3584-512": dict(
W=15.875,
fov=1,
N=8192,
Nx=32,
yB_size=2048,
yN_size=2368,
yP_size=3584,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"7k[1]-3136-512": dict(
W=15.875,
fov=1,
N=7168,
Nx=32,
yB_size=1792,
yN_size=2072,
yP_size=3136,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"6k[1]-2688-512": dict(
W=15.875,
fov=1,
N=6144,
Nx=32,
yB_size=1536,
yN_size=1776,
yP_size=2688,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"5k[1]-2240-512": dict(
W=15.875,
fov=1,
N=5120,
Nx=32,
yB_size=1280,
yN_size=1480,
yP_size=2240,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"4k[1]-1792-512": dict(
W=15.875,
fov=1,
N=4096,
Nx=32,
yB_size=1024,
yN_size=1184,
yP_size=1792,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"3584[1]-1568-512": dict(
W=15.875,
fov=1,
N=3584,
Nx=32,
yB_size=896,
yN_size=1036,
yP_size=1568,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"3k[1]-1344-512": dict(
W=15.875,
fov=1,
N=3072,
Nx=32,
yB_size=768,
yN_size=888,
yP_size=1344,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"2560[1]-1120-512": dict(
W=15.875,
fov=1,
N=2560,
Nx=32,
yB_size=640,
yN_size=740,
yP_size=1120,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"2k[1]-896-512": dict(
W=15.875,
fov=1,
N=2048,
Nx=32,
yB_size=512,
yN_size=592,
yP_size=896,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"1536[1]-672-512": dict(
W=15.875,
fov=1,
N=1536,
Nx=32,
yB_size=384,
yN_size=444,
yP_size=672,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"1k[1]-448-512": dict(
W=15.875,
fov=1,
N=1024,
Nx=32,
yB_size=256,
yN_size=296,
yP_size=448,
xA_size=416,
xM_size=512,
), # nfacet=4², eff 70.3%
"14k[1]-7k-448": dict(
W=15.875,
fov=1,
N=14336,
Nx=28,
yB_size=2048,
yN_size=2368,
yP_size=7168,
xA_size=252,
xM_size=448,
), # nfacet=7², eff 48.6%
"7k[1]-3584-448": dict(
W=15.875,
fov=1,
N=7168,
Nx=28,
yB_size=1024,
yN_size=1184,
yP_size=3584,
xA_size=252,
xM_size=448,
), # nfacet=7², eff 48.6%
"3584[1]-1792-448": dict(
W=15.875,
fov=1,
N=3584,
Nx=28,
yB_size=512,
yN_size=592,
yP_size=1792,
xA_size=252,
xM_size=448,
), # nfacet=7², eff 48.6%
"1792[1]-896-448": dict(
W=15.875,
fov=1,
N=1792,
Nx=28,
yB_size=256,
yN_size=296,
yP_size=896,
xA_size=252,
xM_size=448,
), # nfacet=7², eff 48.6%
"12k[1]-7k-384": dict(
W=15.875,
fov=1,
N=12288,
Nx=3,
yB_size=4096,
yN_size=4736,
yP_size=7168,
xA_size=315,
xM_size=384,
), # nfacet=3², eff 70.9%
"6k[1]-3584-384": dict(
W=15.875,
fov=1,
N=6144,
Nx=3,
yB_size=2048,
yN_size=2368,
yP_size=3584,
xA_size=315,
xM_size=384,
), # nfacet=3², eff 70.9%
"3k[1]-1792-384": dict(
W=15.875,
fov=1,
N=3072,
Nx=3,
yB_size=1024,
yN_size=1184,
yP_size=1792,
xA_size=315,
xM_size=384,
), # nfacet=3², eff 70.9%
"1536[1]-896-384": dict(
W=15.875,
fov=1,
N=1536,
Nx=3,
yB_size=512,
yN_size=592,
yP_size=896,
xA_size=315,
xM_size=384,
), # nfacet=3², eff 70.9%
"10k[1]-7k-320": dict(
W=15.875,
fov=1,
N=10240,
Nx=20,
yB_size=2048,
yN_size=2368,
yP_size=7168,
xA_size=180,
xM_size=320,
), # nfacet=5², eff 48.6%
"5k[1]-3584-320": dict(
W=15.875,
fov=1,
N=5120,
Nx=20,
yB_size=1024,
yN_size=1184,
yP_size=3584,
xA_size=180,
xM_size=320,
), # nfacet=5², eff 48.6%
"2560[1]-1792-320": dict(
W=15.875,
fov=1,
N=2560,
Nx=20,
yB_size=512,
yN_size=592,
yP_size=1792,
xA_size=180,
xM_size=320,
), # nfacet=5², eff 48.6%
"1280[1]-896-320": dict(
W=15.875,
fov=1,
N=1280,
Nx=20,
yB_size=256,
yN_size=296,
yP_size=896,
xA_size=180,
xM_size=320,
), # nfacet=5², eff 48.6%
"16k[1]-14k-256": dict(
W=15.875,
fov=1,
N=16384,
Nx=16,
yB_size=8192,
yN_size=9472,
yP_size=14336,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"14k[1]-12544-256": dict(
W=15.875,
fov=1,
N=14336,
Nx=16,
yB_size=7168,
yN_size=8288,
yP_size=12544,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"12k[1]-10752-256": dict(
W=15.875,
fov=1,
N=12288,
Nx=16,
yB_size=6144,
yN_size=7104,
yP_size=10752,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"10k[1]-8960-256": dict(
W=15.875,
fov=1,
N=10240,
Nx=16,
yB_size=5120,
yN_size=5920,
yP_size=8960,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"8k[1]-7k-256": dict(
W=15.875,
fov=1,
N=8192,
Nx=16,
yB_size=4096,
yN_size=4736,
yP_size=7168,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"7k[1]-6272-256": dict(
W=15.875,
fov=1,
N=7168,
Nx=16,
yB_size=3584,
yN_size=4144,
yP_size=6272,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"6k[1]-5376-256": dict(
W=15.875,
fov=1,
N=6144,
Nx=16,
yB_size=3072,
yN_size=3552,
yP_size=5376,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"5k[1]-4480-256": dict(
W=15.875,
fov=1,
N=5120,
Nx=16,
yB_size=2560,
yN_size=2960,
yP_size=4480,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"4k[1]-3584-256": dict(
W=15.875,
fov=1,
N=4096,
Nx=16,
yB_size=2048,
yN_size=2368,
yP_size=3584,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"3584[1]-3136-256": dict(
W=15.875,
fov=1,
N=3584,
Nx=16,
yB_size=1792,
yN_size=2072,
yP_size=3136,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"3k[1]-2688-256": dict(
W=15.875,
fov=1,
N=3072,
Nx=16,
yB_size=1536,
yN_size=1776,
yP_size=2688,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"2560[1]-2240-256": dict(
W=15.875,
fov=1,
N=2560,
Nx=16,
yB_size=1280,
yN_size=1480,
yP_size=2240,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"2k[1]-1792-256": dict(
W=15.875,
fov=1,
N=2048,
Nx=16,
yB_size=1024,
yN_size=1184,
yP_size=1792,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"1792[1]-1568-256": dict(
W=15.875,
fov=1,
N=1792,
Nx=16,
yB_size=896,
yN_size=1036,
yP_size=1568,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"1536[1]-1344-256": dict(
W=15.875,
fov=1,
N=1536,
Nx=16,
yB_size=768,
yN_size=888,
yP_size=1344,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"1280[1]-1120-256": dict(
W=15.875,
fov=1,
N=1280,
Nx=16,
yB_size=640,
yN_size=740,
yP_size=1120,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"1k[1]-896-256": dict(
W=15.875,
fov=1,
N=1024,
Nx=16,
yB_size=512,
yN_size=592,
yP_size=896,
xA_size=208,
xM_size=256,
), # nfacet=2², eff 70.3%
"16k[1]-6k-512": dict(
W=15.25,
fov=1,
N=16384,
Nx=16,
yB_size=2048,
yN_size=2400,
yP_size=6144,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"14k[1]-5376-512": dict(
W=15.25,
fov=1,
N=14336,
Nx=16,
yB_size=1792,
yN_size=2100,
yP_size=5376,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"12k[1]-4608-512": dict(
W=15.25,
fov=1,
N=12288,
Nx=16,
yB_size=1536,
yN_size=1800,
yP_size=4608,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"10k[1]-3840-512": dict(
W=15.25,
fov=1,
N=10240,
Nx=16,
yB_size=1280,
yN_size=1500,
yP_size=3840,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"8k[1]-3k-512": dict(
W=15.25,
fov=1,
N=8192,
Nx=16,
yB_size=1024,
yN_size=1200,
yP_size=3072,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"7k[1]-2688-512": dict(
W=15.25,
fov=1,
N=7168,
Nx=16,
yB_size=896,
yN_size=1050,
yP_size=2688,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"6k[1]-2304-512": dict(
W=15.25,
fov=1,
N=6144,
Nx=16,
yB_size=768,
yN_size=900,
yP_size=2304,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"5k[1]-1920-512": dict(
W=15.25,
fov=1,
N=5120,
Nx=16,
yB_size=640,
yN_size=750,
yP_size=1920,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"4k[1]-1536-512": dict(
W=15.25,
fov=1,
N=4096,
Nx=16,
yB_size=512,
yN_size=600,
yP_size=1536,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"3584[1]-1344-512": dict(
W=15.25,
fov=1,
N=3584,
Nx=16,
yB_size=448,
yN_size=525,
yP_size=1344,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"3k[1]-1152-512": dict(
W=15.25,
fov=1,
N=3072,
Nx=16,
yB_size=384,
yN_size=450,
yP_size=1152,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"2560[1]-960-512": dict(
W=15.25,
fov=1,
N=2560,
Nx=16,
yB_size=320,
yN_size=375,
yP_size=960,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"2k[1]-768-512": dict(
W=15.25,
fov=1,
N=2048,
Nx=16,
yB_size=256,
yN_size=300,
yP_size=768,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"1536[1]-576-512": dict(
W=15.25,
fov=1,
N=1536,
Nx=16,
yB_size=192,
yN_size=225,
yP_size=576,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"1k[1]-384-512": dict(
W=15.25,
fov=1,
N=1024,
Nx=16,
yB_size=128,
yN_size=150,
yP_size=384,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 56.0%
"14k[1]-6k-448": dict(
W=15.25,
fov=1,
N=14336,
Nx=7,
yB_size=2048,
yN_size=2400,
yP_size=6144,
xA_size=301,
xM_size=448,
), # nfacet=7², eff 57.3%
"7k[1]-3k-448": dict(
W=15.25,
fov=1,
N=7168,
Nx=7,
yB_size=1024,
yN_size=1200,
yP_size=3072,
xA_size=301,
xM_size=448,
), # nfacet=7², eff 57.3%
"3584[1]-1536-448": dict(
W=15.25,
fov=1,
N=3584,
Nx=7,
yB_size=512,
yN_size=600,
yP_size=1536,
xA_size=301,
xM_size=448,
), # nfacet=7², eff 57.3%
"1792[1]-768-448": dict(
W=15.25,
fov=1,
N=1792,
Nx=7,
yB_size=256,
yN_size=300,
yP_size=768,
xA_size=301,
xM_size=448,
), # nfacet=7², eff 57.3%
"12k[1]-6k-384": dict(
W=14.75,
fov=1,
N=12288,
Nx=16,
yB_size=3072,
yN_size=3648,
yP_size=6144,
xA_size=304,
xM_size=384,
), # nfacet=4², eff 66.7%
"6k[1]-3k-384": dict(
W=14.75,
fov=1,
N=6144,
Nx=16,
yB_size=1536,
yN_size=1824,
yP_size=3072,
xA_size=304,
xM_size=384,
), # nfacet=4², eff 66.7%
"3k[1]-1536-384": dict(
W=14.75,
fov=1,
N=3072,
Nx=16,
yB_size=768,
yN_size=912,
yP_size=1536,
xA_size=304,
xM_size=384,
), # nfacet=4², eff 66.7%
"1536[1]-768-384": dict(
W=14.75,
fov=1,
N=1536,
Nx=16,
yB_size=384,
yN_size=456,
yP_size=768,
xA_size=304,
xM_size=384,
), # nfacet=4², eff 66.7%
"10k[1]-6k-320": dict(
W=15.25,
fov=1,
N=10240,
Nx=5,
yB_size=2048,
yN_size=2400,
yP_size=6144,
xA_size=215,
xM_size=320,
), # nfacet=5², eff 57.3%
"5k[1]-3k-320": dict(
W=15.25,
fov=1,
N=5120,
Nx=5,
yB_size=1024,
yN_size=1200,
yP_size=3072,
xA_size=215,
xM_size=320,
), # nfacet=5², eff 57.3%
"2560[1]-1536-320": dict(
W=15.25,
fov=1,
N=2560,
Nx=5,
yB_size=512,
yN_size=600,
yP_size=1536,
xA_size=215,
xM_size=320,
), # nfacet=5², eff 57.3%
"1280[1]-768-320": dict(
W=15.25,
fov=1,
N=1280,
Nx=5,
yB_size=256,
yN_size=300,
yP_size=768,
xA_size=215,
xM_size=320,
), # nfacet=5², eff 57.3%
"16k[1]-12k-256": dict(
W=15.25,
fov=1,
N=16384,
Nx=8,
yB_size=4096,
yN_size=4800,
yP_size=12288,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"14k[1]-10752-256": dict(
W=15.25,
fov=1,
N=14336,
Nx=8,
yB_size=3584,
yN_size=4200,
yP_size=10752,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"12k[1]-9k-256": dict(
W=15.25,
fov=1,
N=12288,
Nx=8,
yB_size=3072,
yN_size=3600,
yP_size=9216,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"10k[1]-7680-256": dict(
W=15.25,
fov=1,
N=10240,
Nx=8,
yB_size=2560,
yN_size=3000,
yP_size=7680,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"8k[1]-6k-256": dict(
W=15.25,
fov=1,
N=8192,
Nx=8,
yB_size=2048,
yN_size=2400,
yP_size=6144,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"7k[1]-5376-256": dict(
W=15.25,
fov=1,
N=7168,
Nx=8,
yB_size=1792,
yN_size=2100,
yP_size=5376,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"6k[1]-4608-256": dict(
W=15.25,
fov=1,
N=6144,
Nx=8,
yB_size=1536,
yN_size=1800,
yP_size=4608,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"5k[1]-3840-256": dict(
W=15.25,
fov=1,
N=5120,
Nx=8,
yB_size=1280,
yN_size=1500,
yP_size=3840,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"4k[1]-3k-256": dict(
W=15.25,
fov=1,
N=4096,
Nx=8,
yB_size=1024,
yN_size=1200,
yP_size=3072,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"3584[1]-2688-256": dict(
W=15.25,
fov=1,
N=3584,
Nx=8,
yB_size=896,
yN_size=1050,
yP_size=2688,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"3k[1]-2304-256": dict(
W=15.25,
fov=1,
N=3072,
Nx=8,
yB_size=768,
yN_size=900,
yP_size=2304,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"2560[1]-1920-256": dict(
W=15.25,
fov=1,
N=2560,
Nx=8,
yB_size=640,
yN_size=750,
yP_size=1920,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"2k[1]-1536-256": dict(
W=15.25,
fov=1,
N=2048,
Nx=8,
yB_size=512,
yN_size=600,
yP_size=1536,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"1792[1]-1344-256": dict(
W=15.25,
fov=1,
N=1792,
Nx=8,
yB_size=448,
yN_size=525,
yP_size=1344,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"1536[1]-1152-256": dict(
W=15.25,
fov=1,
N=1536,
Nx=8,
yB_size=384,
yN_size=450,
yP_size=1152,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"1280[1]-960-256": dict(
W=15.25,
fov=1,
N=1280,
Nx=8,
yB_size=320,
yN_size=375,
yP_size=960,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"1k[1]-768-256": dict(
W=15.25,
fov=1,
N=1024,
Nx=8,
yB_size=256,
yN_size=300,
yP_size=768,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 56.0%
"16k[1]-5k-512": dict(
W=14.75,
fov=1,
N=16384,
Nx=16,
yB_size=2048,
yN_size=2432,
yP_size=5120,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"14k[1]-4480-512": dict(
W=14.75,
fov=1,
N=14336,
Nx=16,
yB_size=1792,
yN_size=2128,
yP_size=4480,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"12k[1]-3840-512": dict(
W=14.75,
fov=1,
N=12288,
Nx=16,
yB_size=1536,
yN_size=1824,
yP_size=3840,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"10k[1]-3200-512": dict(
W=14.75,
fov=1,
N=10240,
Nx=16,
yB_size=1280,
yN_size=1520,
yP_size=3200,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"8k[1]-2560-512": dict(
W=14.75,
fov=1,
N=8192,
Nx=16,
yB_size=1024,
yN_size=1216,
yP_size=2560,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"7k[1]-2240-512": dict(
W=14.75,
fov=1,
N=7168,
Nx=16,
yB_size=896,
yN_size=1064,
yP_size=2240,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"6k[1]-1920-512": dict(
W=14.75,
fov=1,
N=6144,
Nx=16,
yB_size=768,
yN_size=912,
yP_size=1920,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"5k[1]-1600-512": dict(
W=14.75,
fov=1,
N=5120,
Nx=16,
yB_size=640,
yN_size=760,
yP_size=1600,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"4k[1]-1280-512": dict(
W=14.75,
fov=1,
N=4096,
Nx=16,
yB_size=512,
yN_size=608,
yP_size=1280,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"3584[1]-1120-512": dict(
W=14.75,
fov=1,
N=3584,
Nx=16,
yB_size=448,
yN_size=532,
yP_size=1120,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"3k[1]-960-512": dict(
W=14.75,
fov=1,
N=3072,
Nx=16,
yB_size=384,
yN_size=456,
yP_size=960,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"2560[1]-800-512": dict(
W=14.75,
fov=1,
N=2560,
Nx=16,
yB_size=320,
yN_size=380,
yP_size=800,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"2k[1]-640-512": dict(
W=14.75,
fov=1,
N=2048,
Nx=16,
yB_size=256,
yN_size=304,
yP_size=640,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"1536[1]-480-512": dict(
W=14.75,
fov=1,
N=1536,
Nx=16,
yB_size=192,
yN_size=228,
yP_size=480,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"1k[1]-320-512": dict(
W=14.75,
fov=1,
N=1024,
Nx=16,
yB_size=128,
yN_size=152,
yP_size=320,
xA_size=336,
xM_size=512,
), # nfacet=8², eff 55.3%
"14k[1]-5k-448": dict(
W=14.0,
fov=1,
N=14336,
Nx=16,
yB_size=1792,
yN_size=2176,
yP_size=5120,
xA_size=272,
xM_size=448,
), # nfacet=8², eff 50.0%
"7k[1]-2560-448": dict(
W=14.0,
fov=1,
N=7168,
Nx=16,
yB_size=896,
yN_size=1088,
yP_size=2560,
xA_size=272,
xM_size=448,
), # nfacet=8², eff 50.0%
"3584[1]-1280-448": dict(
W=14.0,
fov=1,
N=3584,
Nx=16,
yB_size=448,
yN_size=544,
yP_size=1280,
xA_size=272,
xM_size=448,
), # nfacet=8², eff 50.0%
"1792[1]-640-448": dict(
W=14.0,
fov=1,
N=1792,
Nx=16,
yB_size=224,
yN_size=272,
yP_size=640,
xA_size=272,
xM_size=448,
), # nfacet=8², eff 50.0%
"12k[1]-5k-384": dict(
W=14.75,
fov=1,
N=12288,
Nx=12,
yB_size=2048,
yN_size=2432,
yP_size=5120,
xA_size=252,
xM_size=384,
), # nfacet=6², eff 55.3%
"6k[1]-2560-384": dict(
W=14.75,
fov=1,
N=6144,
Nx=12,
yB_size=1024,
yN_size=1216,
yP_size=2560,
xA_size=252,
xM_size=384,
), # nfacet=6², eff 55.3%
"3k[1]-1280-384": dict(
W=14.75,
fov=1,
N=3072,
Nx=12,
yB_size=512,
yN_size=608,
yP_size=1280,
xA_size=252,
xM_size=384,
), # nfacet=6², eff 55.3%
"1536[1]-640-384": dict(
W=14.75,
fov=1,
N=1536,
Nx=12,
yB_size=256,
yN_size=304,
yP_size=640,
xA_size=252,
xM_size=384,
), # nfacet=6², eff 55.3%
"10k[1]-5k-320": dict(
W=14.375,
fov=1,
N=10240,
Nx=8,
yB_size=2560,
yN_size=3072,
yP_size=5120,
xA_size=232,
xM_size=320,
), # nfacet=4², eff 60.4%
"5k[1]-2560-320": dict(
W=14.375,
fov=1,
N=5120,
Nx=8,
yB_size=1280,
yN_size=1536,
yP_size=2560,
xA_size=232,
xM_size=320,
), # nfacet=4², eff 60.4%
"2560[1]-1280-320": dict(
W=14.375,
fov=1,
N=2560,
Nx=8,
yB_size=640,
yN_size=768,
yP_size=1280,
xA_size=232,
xM_size=320,
), # nfacet=4², eff 60.4%
"1280[1]-640-320": dict(
W=14.375,
fov=1,
N=1280,
Nx=8,
yB_size=320,
yN_size=384,
yP_size=640,
xA_size=232,
xM_size=320,
), # nfacet=4², eff 60.4%
"16k[1]-10k-256": dict(
W=14.75,
fov=1,
N=16384,
Nx=8,
yB_size=4096,
yN_size=4864,
yP_size=10240,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"14k[1]-8960-256": dict(
W=14.75,
fov=1,
N=14336,
Nx=8,
yB_size=3584,
yN_size=4256,
yP_size=8960,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"12k[1]-7680-256": dict(
W=14.75,
fov=1,
N=12288,
Nx=8,
yB_size=3072,
yN_size=3648,
yP_size=7680,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"10k[1]-6400-256": dict(
W=14.75,
fov=1,
N=10240,
Nx=8,
yB_size=2560,
yN_size=3040,
yP_size=6400,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"8k[1]-5k-256": dict(
W=14.75,
fov=1,
N=8192,
Nx=8,
yB_size=2048,
yN_size=2432,
yP_size=5120,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"7k[1]-4480-256": dict(
W=14.75,
fov=1,
N=7168,
Nx=8,
yB_size=1792,
yN_size=2128,
yP_size=4480,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"6k[1]-3840-256": dict(
W=14.75,
fov=1,
N=6144,
Nx=8,
yB_size=1536,
yN_size=1824,
yP_size=3840,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"5k[1]-3200-256": dict(
W=14.75,
fov=1,
N=5120,
Nx=8,
yB_size=1280,
yN_size=1520,
yP_size=3200,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"4k[1]-2560-256": dict(
W=14.75,
fov=1,
N=4096,
Nx=8,
yB_size=1024,
yN_size=1216,
yP_size=2560,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"3584[1]-2240-256": dict(
W=14.75,
fov=1,
N=3584,
Nx=8,
yB_size=896,
yN_size=1064,
yP_size=2240,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"3k[1]-1920-256": dict(
W=14.75,
fov=1,
N=3072,
Nx=8,
yB_size=768,
yN_size=912,
yP_size=1920,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"2560[1]-1600-256": dict(
W=14.75,
fov=1,
N=2560,
Nx=8,
yB_size=640,
yN_size=760,
yP_size=1600,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"2k[1]-1280-256": dict(
W=14.75,
fov=1,
N=2048,
Nx=8,
yB_size=512,
yN_size=608,
yP_size=1280,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"1792[1]-1120-256": dict(
W=14.75,
fov=1,
N=1792,
Nx=8,
yB_size=448,
yN_size=532,
yP_size=1120,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"1536[1]-960-256": dict(
W=14.75,
fov=1,
N=1536,
Nx=8,
yB_size=384,
yN_size=456,
yP_size=960,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"1280[1]-800-256": dict(
W=14.75,
fov=1,
N=1280,
Nx=8,
yB_size=320,
yN_size=380,
yP_size=800,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"1k[1]-640-256": dict(
W=14.75,
fov=1,
N=1024,
Nx=8,
yB_size=256,
yN_size=304,
yP_size=640,
xA_size=168,
xM_size=256,
), # nfacet=4², eff 55.3%
"16k[1]-4k-512": dict(
W=13.25,
fov=1,
N=16384,
Nx=256,
yB_size=2048,
yN_size=2560,
yP_size=4096,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"14k[1]-3584-512": dict(
W=13.25,
fov=1,
N=14336,
Nx=256,
yB_size=1792,
yN_size=2240,
yP_size=3584,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"12k[1]-3k-512": dict(
W=13.25,
fov=1,
N=12288,
Nx=256,
yB_size=1536,
yN_size=1920,
yP_size=3072,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"10k[1]-2560-512": dict(
W=13.25,
fov=1,
N=10240,
Nx=256,
yB_size=1280,
yN_size=1600,
yP_size=2560,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"8k[1]-2k-512": dict(
W=13.25,
fov=1,
N=8192,
Nx=256,
yB_size=1024,
yN_size=1280,
yP_size=2048,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"7k[1]-1792-512": dict(
W=13.25,
fov=1,
N=7168,
Nx=256,
yB_size=896,
yN_size=1120,
yP_size=1792,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"6k[1]-1536-512": dict(
W=13.25,
fov=1,
N=6144,
Nx=256,
yB_size=768,
yN_size=960,
yP_size=1536,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"5k[1]-1280-512": dict(
W=13.25,
fov=1,
N=5120,
Nx=256,
yB_size=640,
yN_size=800,
yP_size=1280,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"4k[1]-1k-512": dict(
W=13.25,
fov=1,
N=4096,
Nx=256,
yB_size=512,
yN_size=640,
yP_size=1024,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"3584[1]-896-512": dict(
W=13.25,
fov=1,
N=3584,
Nx=256,
yB_size=448,
yN_size=560,
yP_size=896,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"3k[1]-768-512": dict(
W=13.25,
fov=1,
N=3072,
Nx=256,
yB_size=384,
yN_size=480,
yP_size=768,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"2560[1]-640-512": dict(
W=13.25,
fov=1,
N=2560,
Nx=256,
yB_size=320,
yN_size=400,
yP_size=640,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"2k[1]-512-512": dict(
W=13.25,
fov=1,
N=2048,
Nx=256,
yB_size=256,
yN_size=320,
yP_size=512,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"1536[1]-384-512": dict(
W=13.25,
fov=1,
N=1536,
Nx=256,
yB_size=192,
yN_size=240,
yP_size=384,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"1k[1]-256-512": dict(
W=13.25,
fov=1,
N=1024,
Nx=256,
yB_size=128,
yN_size=160,
yP_size=256,
xA_size=256,
xM_size=512,
), # nfacet=8², eff 40.0%
"14k[1]-4k-448": dict(
W=13.25,
fov=1,
N=14336,
Nx=7,
yB_size=2048,
yN_size=2560,
yP_size=4096,
xA_size=329,
xM_size=448,
), # nfacet=7², eff 58.8%
"7k[1]-2k-448": dict(
W=13.25,
fov=1,
N=7168,
Nx=7,
yB_size=1024,
yN_size=1280,
yP_size=2048,
xA_size=329,
xM_size=448,
), # nfacet=7², eff 58.8%
"3584[1]-1k-448": dict(
W=13.25,
fov=1,
N=3584,
Nx=7,
yB_size=512,
yN_size=640,
yP_size=1024,
xA_size=329,
xM_size=448,
), # nfacet=7², eff 58.8%
"1792[1]-512-448": dict(
W=13.25,
fov=1,
N=1792,
Nx=7,
yB_size=256,
yN_size=320,
yP_size=512,
xA_size=329,
xM_size=448,
), # nfacet=7², eff 58.8%
"12k[1]-4k-384": dict(
W=13.25,
fov=1,
N=12288,
Nx=192,
yB_size=1536,
yN_size=1920,
yP_size=4096,
xA_size=192,
xM_size=384,
), # nfacet=8², eff 40.0%
"6k[1]-2k-384": dict(
W=13.25,
fov=1,
N=6144,
Nx=192,
yB_size=768,
yN_size=960,
yP_size=2048,
xA_size=192,
xM_size=384,
), # nfacet=8², eff 40.0%
"3k[1]-1k-384": dict(
W=13.25,
fov=1,
N=3072,
Nx=192,
yB_size=384,
yN_size=480,
yP_size=1024,
xA_size=192,
xM_size=384,
), # nfacet=8², eff 40.0%
"1536[1]-512-384": dict(
W=13.25,
fov=1,
N=1536,
Nx=192,
yB_size=192,
yN_size=240,
yP_size=512,
xA_size=192,
xM_size=384,
), # nfacet=8², eff 40.0%
"10k[1]-4k-320": dict(
W=13.25,
fov=1,
N=10240,
Nx=5,
yB_size=2048,
yN_size=2560,
yP_size=4096,
xA_size=235,
xM_size=320,
), # nfacet=5², eff 58.8%
"5k[1]-2k-320": dict(
W=13.25,
fov=1,
N=5120,
Nx=5,
yB_size=1024,
yN_size=1280,
yP_size=2048,
xA_size=235,
xM_size=320,
), # nfacet=5², eff 58.8%
"2560[1]-1k-320": dict(
W=13.25,
fov=1,
N=2560,
Nx=5,
yB_size=512,
yN_size=640,
yP_size=1024,
xA_size=235,
xM_size=320,
), # nfacet=5², eff 58.8%
"1280[1]-512-320": dict(
W=13.25,
fov=1,
N=1280,
Nx=5,
yB_size=256,
yN_size=320,
yP_size=512,
xA_size=235,
xM_size=320,
), # nfacet=5², eff 58.8%
"16k[1]-8k-256": dict(
W=13.25,
fov=1,
N=16384,
Nx=128,
yB_size=4096,
yN_size=5120,
yP_size=8192,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"14k[1]-7k-256": dict(
W=13.25,
fov=1,
N=14336,
Nx=128,
yB_size=3584,
yN_size=4480,
yP_size=7168,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"12k[1]-6k-256": dict(
W=13.25,
fov=1,
N=12288,
Nx=128,
yB_size=3072,
yN_size=3840,
yP_size=6144,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"10k[1]-5k-256": dict(
W=13.25,
fov=1,
N=10240,
Nx=128,
yB_size=2560,
yN_size=3200,
yP_size=5120,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"8k[1]-4k-256": dict(
W=13.25,
fov=1,
N=8192,
Nx=128,
yB_size=2048,
yN_size=2560,
yP_size=4096,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"7k[1]-3584-256": dict(
W=13.25,
fov=1,
N=7168,
Nx=128,
yB_size=1792,
yN_size=2240,
yP_size=3584,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"6k[1]-3k-256": dict(
W=13.25,
fov=1,
N=6144,
Nx=128,
yB_size=1536,
yN_size=1920,
yP_size=3072,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"5k[1]-2560-256": dict(
W=13.25,
fov=1,
N=5120,
Nx=128,
yB_size=1280,
yN_size=1600,
yP_size=2560,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"4k[1]-2k-256": dict(
W=13.25,
fov=1,
N=4096,
Nx=128,
yB_size=1024,
yN_size=1280,
yP_size=2048,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"3584[1]-1792-256": dict(
W=13.25,
fov=1,
N=3584,
Nx=128,
yB_size=896,
yN_size=1120,
yP_size=1792,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"3k[1]-1536-256": dict(
W=13.25,
fov=1,
N=3072,
Nx=128,
yB_size=768,
yN_size=960,
yP_size=1536,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"2560[1]-1280-256": dict(
W=13.25,
fov=1,
N=2560,
Nx=128,
yB_size=640,
yN_size=800,
yP_size=1280,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"2k[1]-1k-256": dict(
W=13.25,
fov=1,
N=2048,
Nx=128,
yB_size=512,
yN_size=640,
yP_size=1024,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"1792[1]-896-256": dict(
W=13.25,
fov=1,
N=1792,
Nx=128,
yB_size=448,
yN_size=560,
yP_size=896,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"1536[1]-768-256": dict(
W=13.25,
fov=1,
N=1536,
Nx=128,
yB_size=384,
yN_size=480,
yP_size=768,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"1280[1]-640-256": dict(
W=13.25,
fov=1,
N=1280,
Nx=128,
yB_size=320,
yN_size=400,
yP_size=640,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
"1k[1]-512-256": dict(
W=13.25,
fov=1,
N=1024,
Nx=128,
yB_size=256,
yN_size=320,
yP_size=512,
xA_size=128,
xM_size=256,
), # nfacet=4², eff 40.0%
}
| 20.97666
| 68
| 0.433096
| 15,444
| 104,254
| 2.777972
| 0.02985
| 0.063073
| 0.052327
| 0.059436
| 0.94124
| 0.884414
| 0.878633
| 0.876419
| 0.752605
| 0.705289
| 0
| 0.313403
| 0.419437
| 104,254
| 4,969
| 69
| 20.980881
| 0.395433
| 0.100735
| 0
| 0.892734
| 0
| 0
| 0.069822
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b05436011b1233f2b0def3c25a8fe5549b12ae02
| 2,116
|
py
|
Python
|
src/abaqus/PlotOptions/OdbContactDiagnostics.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | 7
|
2022-01-21T09:15:45.000Z
|
2022-02-15T09:31:58.000Z
|
src/abaqus/PlotOptions/OdbContactDiagnostics.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
src/abaqus/PlotOptions/OdbContactDiagnostics.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
from .OdbAuxiliaryData import OdbAuxiliaryData
class OdbContactDiagnostics:
    """The OdbDiagnosticContact object.

    Holds per-step contact diagnostic information read from an output
    database. All attributes are read-only.

    Notes
    -----
    This object can be accessed by:

    .. code-block:: python

        import visualization
        session.odbData[name].diagnosticData.steps[i].contactDiagnostics[i]
    """

    # Repository of OdbAuxiliaryData objects. This attribute is read-only.
    data: dict[str, OdbAuxiliaryData] = {}

    # Opening/overclosure status of the contact. This attribute is read-only.
    description: str = ''

    # One string per contact pair describing its nature. This attribute is
    # read-only.
    detailStrings: tuple = ()

    # Type of contact initialization. This attribute is read-only.
    type: str = ''

    # Default format value. This attribute is read-only.
    defaultFormats: str = ''

    # Element description. This attribute is read-only.
    elementDescriptions: str = ''

    # Node description. This attribute is read-only.
    nodeDescriptions: str = ''
| 35.864407
| 127
| 0.69896
| 250
| 2,116
| 5.916
| 0.256
| 0.123056
| 0.141988
| 0.179851
| 0.749155
| 0.749155
| 0.749155
| 0.702502
| 0.620014
| 0.620014
| 0
| 0
| 0.232042
| 2,116
| 58
| 128
| 36.482759
| 0.910154
| 0.764178
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.111111
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
bbce3ed82fd85dc1d2b62c707692286aa9b12afa
| 102
|
py
|
Python
|
ceefax/helpers/time.py
|
mscroggs/CEEFAX
|
8e7a075de1809064b77360da24ebbbaa409c3bf2
|
[
"MIT"
] | 1
|
2020-03-28T15:53:22.000Z
|
2020-03-28T15:53:22.000Z
|
ceefax/helpers/time.py
|
mscroggs/CEEFAX
|
8e7a075de1809064b77360da24ebbbaa409c3bf2
|
[
"MIT"
] | 1
|
2021-02-05T13:43:52.000Z
|
2021-02-05T13:43:52.000Z
|
ceefax/helpers/time.py
|
mscroggs/CEEFAX
|
8e7a075de1809064b77360da24ebbbaa409c3bf2
|
[
"MIT"
] | null | null | null |
from datetime import datetime as _stdlib_datetime


def datetime(*args, **kwargs):
    """Thin factory around :class:`datetime.datetime`.

    Forwards every positional and keyword argument unchanged, so callers
    can construct datetimes through this helpers module without importing
    the standard-library class themselves.
    """
    return _stdlib_datetime(*args, **kwargs)
| 17
| 36
| 0.696078
| 14
| 102
| 4.928571
| 0.642857
| 0.289855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 102
| 5
| 37
| 20.4
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
a59873240904ea9db5a2ce7eb948d025f7368f10
| 3,061
|
py
|
Python
|
cyclegenerator.py
|
lax1089/crypto-arbitrage-trader
|
7bad71d60490035568418c3bfa6291b2865ef3f0
|
[
"MIT"
] | 9
|
2018-06-13T09:04:24.000Z
|
2021-11-23T00:10:25.000Z
|
cyclegenerator.py
|
lax1089/crypto-arbitrage-trader
|
7bad71d60490035568418c3bfa6291b2865ef3f0
|
[
"MIT"
] | null | null | null |
cyclegenerator.py
|
lax1089/crypto-arbitrage-trader
|
7bad71d60490035568418c3bfa6291b2865ef3f0
|
[
"MIT"
] | 1
|
2022-03-30T06:32:20.000Z
|
2022-03-30T06:32:20.000Z
|
from cycle import Cycle
class CycleGenerator(object):
    """Enumerates triangular arbitrage cycles over a list of markets.

    Each market object is expected to expose ``currencies`` (a two-element
    sequence), ``code``, ``print_market()`` and ``get_sourceCurrency()`` —
    TODO confirm against the market class used by the caller.
    """

    @staticmethod
    def _collect_cycles(markets, currencyFilter=None):
        """Shared worker for both public entry points.

        Finds every triple of markets whose currencies form a closed loop
        and returns the resulting Cycle objects together with their
        reverses. When *currencyFilter* is given, only triples whose first
        market sources that currency are considered (the unfiltered and
        filtered variants previously duplicated this entire body).
        """
        cycles = []
        counter = 0
        for index1 in range(len(markets)):
            markets[index1].print_market()
            # The filter only restricts the first leg of the triple,
            # matching the original generateCycles behaviour.
            if currencyFilter is not None and markets[index1].get_sourceCurrency() != currencyFilter:
                continue
            for index2 in range(index1+1, len(markets)):
                for i in range(2):
                    for j in range(2):
                        # i/j/k choose which side of each market links into
                        # the next leg of the candidate cycle.
                        if markets[index1].currencies[1-i] == markets[index2].currencies[j]:
                            for index3 in range(index2+1, len(markets)):
                                for k in range(2):
                                    if markets[index2].currencies[1-j] == markets[index3].currencies[k] and markets[index1].currencies[i] == markets[index3].currencies[1-k]:
                                        print('match '+markets[index1].code+' '+markets[index2].code+' '+markets[index3].code+' '+str(i==0)+' '+str(j==0)+' '+str(k==0))
                                        currCycle = Cycle([markets[index1], markets[index2], markets[index3]], [(i==0), (j==0), (k==0)]).rotate(['ETH','BTC','USDT'])
                                        cycles += [currCycle, currCycle.reverse()]
                                        counter += 1
        print('counter %d' % counter)
        return cycles

    @staticmethod
    def generateCyclesNoFilter(markets):
        """Return every triangular cycle (and its reverse) in *markets*."""
        return CycleGenerator._collect_cycles(markets)

    @staticmethod
    def generateCycles(markets, currencyFilter):
        """Return triangular cycles whose first market sources *currencyFilter*."""
        return CycleGenerator._collect_cycles(markets, currencyFilter)
| 58.865385
| 177
| 0.465207
| 304
| 3,061
| 4.674342
| 0.151316
| 0.059113
| 0.033779
| 0.039409
| 0.8867
| 0.8867
| 0.8867
| 0.8867
| 0.8867
| 0.8867
| 0
| 0.042337
| 0.390395
| 3,061
| 51
| 178
| 60.019608
| 0.719185
| 0.078079
| 0
| 0.878049
| 1
| 0
| 0.022001
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04878
| false
| 0
| 0.02439
| 0
| 0.146341
| 0.146341
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3c05f3a39b814ae5dd2ac553c5563c69332a50b2
| 158,959
|
py
|
Python
|
match_2step.py
|
wangpinggl/covidQA
|
f8b440ee27058cc1030af7cd57178ddf987462d0
|
[
"MIT"
] | null | null | null |
match_2step.py
|
wangpinggl/covidQA
|
f8b440ee27058cc1030af7cd57178ddf987462d0
|
[
"MIT"
] | null | null | null |
match_2step.py
|
wangpinggl/covidQA
|
f8b440ee27058cc1030af7cd57178ddf987462d0
|
[
"MIT"
] | null | null | null |
import numpy as np
import json
import datetime
import utility
import re
import random
from sumeval.metrics.rouge import RougeCalculator
# ROUGE scorer used by find_best2 for fuzzy template matching.
rouge = RougeCalculator(stopwords=False, lang="en")
# NOTE: this rebinding shadows the imported `utility` module with a
# Utility instance; all later `utility.*` accesses go through the instance.
utility = utility.Utility()
# Lookup tables provided by Utility — presumably mapping month/day names
# to zero-padded numeric strings; TODO confirm against utility.Utility.
month_dict = utility.month_dict
month_dict6 = utility.month_dict6
day_dict = utility.day_dict
# Today's date as 'YYYYMMDD' (ISO date with the dashes stripped).
today = str(datetime.date.today()).replace("-","")
# State lookup plus parallel key/value lists for reverse lookups.
state_dict = utility.state_dict
state_key = list(state_dict.keys())
state_val = list(state_dict.values())
def dateconvert1(day, month, year='2021'):
    """Convert day/month names to a ``'MM_DD_YYYY'`` date string.

    Parameters
    ----------
    day, month :
        Keys of the module-level ``day_dict`` / ``month_dict`` tables;
        their values are assumed to be zero-padded numeric strings —
        TODO confirm against utility.Utility.
    year : str, optional
        Four-digit year inserted into the result. Defaults to '2021',
        preserving the previously hard-coded behaviour.
    """
    monthstr = str(month_dict[month])
    daystr = str(day_dict[day])
    return monthstr + '_' + daystr + '_' + year
def dateconvert2(day, month, year='2021'):
    """Convert day/month names to a compact ``'YYYYMMDD'`` date string.

    Parameters
    ----------
    day, month :
        Keys of the module-level ``day_dict`` / ``month_dict`` tables;
        their values are assumed to be zero-padded numeric strings —
        TODO confirm against utility.Utility.
    year : str, optional
        Four-digit year prefix. Defaults to '2021', preserving the
        previously hard-coded behaviour.
    """
    monthstr = str(month_dict[month])
    daystr = str(day_dict[day])
    return year + monthstr + daystr
def dateconvert6(day, month, year='2021'):
    """Convert day/month names to an ISO-style ``'YYYY-MM-DD'`` string.

    Parameters
    ----------
    day, month :
        Keys of the module-level ``day_dict`` / ``month_dict`` tables;
        their values are assumed to be zero-padded numeric strings —
        TODO confirm against utility.Utility.
    year : str, optional
        Four-digit year prefix. Defaults to '2021', preserving the
        previously hard-coded behaviour.
    """
    monthstr = str(month_dict[month])
    daystr = str(day_dict[day])
    return year + '-' + monthstr + '-' + daystr
def monthconvert1(month):
    """Return the (start, end) dates of *month* in 2021 as 'MM_DD_YYYY'.

    Parameters
    ----------
    month :
        A key of the module-level ``month_dict``; its value is assumed
        to be a zero-padded month string '01'..'12' — TODO confirm.

    Returns
    -------
    tuple[str, str]
        First and last day of the month, e.g. ('04_01_2021', '04_30_2021').
    """
    # Last day of each 2021 month (non-leap year, so February ends on 28).
    # December is intentionally absent: the original if/elif chain sent
    # everything unmatched — normally '12' — to the December branch.
    last_day = {'01': '31', '02': '28', '03': '31', '04': '30',
                '05': '31', '06': '30', '07': '31', '08': '31',
                '09': '30', '10': '31', '11': '30'}
    monthstr = str(month_dict[month])
    if monthstr in last_day:
        startdate = monthstr + '_01_2021'
        enddate = monthstr + '_' + last_day[monthstr] + '_2021'
    else:
        startdate = '12_01_2021'
        enddate = '12_31_2021'
    return startdate, enddate
def monthconvert2(month):
    """Return the (start, end) dates of *month* in 2021 as 'YYYYMMDD'.

    Parameters
    ----------
    month :
        A key of the module-level ``month_dict``; its value is assumed
        to be a zero-padded month string '01'..'12' — TODO confirm.

    Returns
    -------
    tuple[str, str]
        First and last day of the month, e.g. ('20210401', '20210430').
    """
    # Last day of each 2021 month (non-leap year, so February ends on 28).
    # December is intentionally absent: the original if/elif chain sent
    # everything unmatched — normally '12' — to the December branch.
    last_day = {'01': '31', '02': '28', '03': '31', '04': '30',
                '05': '31', '06': '30', '07': '31', '08': '31',
                '09': '30', '10': '31', '11': '30'}
    monthstr = str(month_dict[month])
    if monthstr in last_day:
        startdate = '2021' + monthstr + '01'
        enddate = '2021' + monthstr + last_day[monthstr]
    else:
        startdate = '20211201'
        enddate = '20211231'
    return startdate, enddate
def monthconvert4(month):
    """Return the (start, end) dates of *month* in 2021 as 'YYYY-MM-DD'.

    Parameters
    ----------
    month :
        A key of the module-level ``month_dict``; its value is assumed
        to be a zero-padded month string '01'..'12' — TODO confirm.

    Returns
    -------
    tuple[str, str]
        First and last day of the month, e.g. ('2021-04-01', '2021-04-30').
    """
    # Last day of each 2021 month (non-leap year, so February ends on 28).
    # December is intentionally absent: the original if/elif chain sent
    # everything unmatched — normally '12' — to the December branch.
    last_day = {'01': '31', '02': '28', '03': '31', '04': '30',
                '05': '31', '06': '30', '07': '31', '08': '31',
                '09': '30', '10': '31', '11': '30'}
    monthstr = str(month_dict[month])
    if monthstr in last_day:
        startdate = '2021-' + monthstr + '-01'
        enddate = '2021-' + monthstr + '-' + last_day[monthstr]
    else:
        startdate = '2021-12-01'
        enddate = '2021-12-31'
    return startdate, enddate
# Entity vocabularies exposed by the Utility instance.
case_entity = utility.case_entity
demographic_entity = utility.demographic_entity
entities = utility.entities
amount_entity = utility.amount_entity
# Question templates, entity value lists, and the natural-language
# questions to match, all loaded eagerly at import time.
with open('templist.json') as json_file:
    data1 = json.load(json_file)
with open('entitylist.json') as json_file:
    data3 = json.load(json_file)
with open('natural_question.json') as json_file:
    data5 = json.load(json_file)
# Unpack each entity-value list from entitylist.json into its own name.
genderlist = data3['Gender']
testlist = data3['Testing Entity']
bedlist = data3['Bed Entity']
ratelist = data3['Rate Entity']
ratelist2 = data3['Rate Entity2']
moblist = data3['Mobility Entity']
hoslist = data3['Hospitalization Entity']
statelist = data3['State Entity']
caselist = data3['Cases']
caselist2 = data3['Cases2']
countrylist = data3['Country Entity']
monthlist = data3['Month']
daylist = data3['Day']
numberlist = data3['Number']
valuelist = data3['Value Entity']
amountlist = data3['Amount Entity']
racelist = data3['Race']
demlist = data3['Demographic Entity']
countylist = data3['County Entity']
provincelist = data3['Province Entity']
def find_best2(input_, pool_):
    """Return the element of *pool_* most similar to *input_* by ROUGE-1.

    Each candidate is compared case-insensitively against the input using
    word-level ROUGE-1.  If no candidate shares a single word with the
    input (all scores are zero), the comparison is retried at character
    level (every string spaced out into individual characters) so a best
    match is still produced.

    Raises ValueError (via np.argmax on an empty score list) when *pool_*
    is empty, matching the original behavior.
    """
    # Hoisted out of the loop: the original re-lowered input_ on every
    # iteration (and mutated the parameter in the process).
    query = input_.lower()
    scores = [rouge.rouge_n(summary=query, references=str(cand).lower(), n=1)
              for cand in pool_]
    if np.sum(scores) == 0:
        # No word overlap at all -- fall back to character granularity.
        chars = ' '.join(query)
        scores = [rouge.rouge_n(summary=chars,
                                references=' '.join(str(cand)).lower(), n=1)
                  for cand in pool_]
    return str(pool_[np.argmax(scores)])
for item in data5.values():
real_question = item[0]["real_question"]
question_temp = item[0]["question"]
query = item[0]["sql"]
templist = list(data1)
bucket = {}
test = find_best2(real_question, templist)
for items in data1[test]:
for item1 in data3[items]:
if real_question.find(item1) != -1:
bucket.update({items: item1})
if items == 'County Entity' or items == 'Province Entity':
bucket.update({items: item1.split(", ")[0]})
if data1[test] != list(bucket.keys()):
templist.remove(test)
test = find_best2(real_question, templist)
bucket = {}
for item2 in data1[test]:
for item3 in data3[item2]:
if real_question.find(item3) != -1:
bucket.update({item2: item3})
if item == 'County Entity' or item == 'Province Entity':
bucket.update({item: item1.split(", ")[0]})
bucket.update({'matched temp': test})
real_question = bucket['matched temp']
try:
gender = bucket['Gender']
except KeyError:
pass
try:
tes = bucket['Testing Entity']
except KeyError:
tes = ''
try:
bed = bucket['Bed Entity']
except KeyError:
pass
try:
rat = bucket['Rate Entity']
except KeyError:
rat = ''
try:
rat = bucket['Rate Entity2']
except KeyError:
rat = ''
try:
mob = bucket['Mobility Entity']
except KeyError:
pass
try:
hos = bucket['Hospitalization Entity']
except KeyError:
pass
try:
state = bucket['State Entity']
state_name = bucket['State Entity']
except KeyError:
pass
try:
cas = bucket['Cases']
except KeyError:
pass
try:
cas = bucket['Cases2']
except KeyError:
pass
try:
country_name = bucket['Country Entity']
country = bucket['Country Entity']
except KeyError:
pass
try:
mon = bucket['Month']
except KeyError:
pass
try:
num = bucket['Number']
except KeyError:
pass
try:
day = bucket['Day']
except KeyError:
pass
try:
val = bucket['Value Entity']
except KeyError:
pass
try:
amo = bucket['Amount Entity']
except KeyError:
pass
try:
rac = bucket['Race']
except KeyError:
pass
try:
dem = bucket['Demographic Entity']
except KeyError:
pass
try:
county_name = bucket['County Entity']
except KeyError:
pass
try:
province_name = bucket['Province Entity']
except KeyError:
pass
if real_question == "How many (Cases) occurred in (State Entity) in (Day), (Month)?":
sql = "Select SUM(Cases) from db1_state_date where (Location)"
given_date = dateconvert1(day, mon)
sql = sql.replace("date", given_date)
if cas == 'confirmed cases' or cas == 'cases' or cas == 'cases increased' or cas == 'confirmed cases increased':
sql = sql.replace("Cases", "Confirmed")
elif cas == 'daily cases' or cas == 'new cases':
sql = sql.replace("Cases", 'Confirmed')
elif cas == 'deaths' or cas == 'deaths increased':
sql = sql.replace("Cases", "Deaths")
elif cas == 'active cases' or cas == 'active cases increased':
sql = sql.replace("Cases", "Active")
elif cas == 'recovered cases' or cas == 'recovered cases increased':
sql = sql.replace("Cases", "Recovered")
sql = sql.replace("(Location)", "Province_State = \"" + state + "\"")
bucket.update({'sql': sql})
if real_question == "Give me the number of (Cases) occurred in (County Entity), (State Entity) in (Day), (Month).":
sql = "Select SUM(Cases) from db1_date where (Location)"
given_date = dateconvert1(day, mon)
sql = sql.replace("date", given_date)
if cas == 'confirmed cases' or cas == 'cases' or cas == 'cases increased' or cas == 'confirmed cases increased':
sql = sql.replace("Cases", "Confirmed")
elif cas == 'daily cases' or cas == 'new cases':
sql = sql.replace("Cases", 'Confirmed')
elif cas == 'deaths' or cas == 'deaths increased':
sql = sql.replace("Cases", "Deaths")
elif cas == 'active cases' or cas == 'active cases increased':
sql = sql.replace("Cases", "Active")
elif cas == 'recovered cases' or cas == 'recovered cases increased':
sql = sql.replace("Cases", "Recovered")
sql = sql.replace("(Location)", "Province_State = \"" + state_name + "\" and Admin2 = \"" + county_name + "\" ")
bucket.update({'sql': sql})
if real_question == "Provide me with the number of (Cases) occurred in (Province Entity), (Country Entity) in (Month).":
sql = "Select (Select SUM(Cases) from db1_end_date where (Location)) - (Select SUM(Cases) from db1_start_date where (Location))"
start_date, end_date = monthconvert1(mon)
sql = sql.replace("end_date", end_date)
sql = sql.replace("start_date", start_date)
if cas == 'confirmed cases' or cas == 'cases' or cas == 'cases increased' or cas == 'confirmed cases increased':
sql = sql.replace("Cases", "Confirmed")
elif cas == 'daily cases' or cas == 'new cases':
sql = sql.replace("Cases", 'Confirmed')
elif cas == 'deaths' or cas == 'deaths increased':
sql = sql.replace("Cases", "Deaths")
elif cas == 'active cases' or cas == 'active cases increased':
sql = sql.replace("Cases", "Active")
elif cas == 'recovered cases' or cas == 'recovered cases increased':
sql = sql.replace("Cases", "Recovered")
sql = sql.replace("(Location)",
"Province_State = \"" + province_name + "\" and Country_Region = \"" + country_name + "\" ")
bucket.update({'sql': sql})
if real_question == "List the number of (Cases) occurred in (Country Entity) in (Month).":
sql = "Select (Select SUM(Cases) from db1_end_date where (Location)) - (Select SUM(Cases) from db1_start_date where (Location))"
start_date, end_date = monthconvert1(mon)
sql = sql.replace("end_date", end_date)
sql = sql.replace("start_date", start_date)
if cas == 'confirmed cases' or cas == 'cases' or cas == 'cases increased' or cas == 'confirmed cases increased':
sql = sql.replace("Cases", "Confirmed")
elif cas == 'daily cases' or cas == 'new cases':
sql = sql.replace("Cases", 'Confirmed')
elif cas == 'deaths' or cas == 'deaths increased':
sql = sql.replace("Cases", "Deaths")
elif cas == 'active cases' or cas == 'active cases increased':
sql = sql.replace("Cases", "Active")
elif cas == 'recovered cases' or cas == 'recovered cases increased':
sql = sql.replace("Cases", "Recovered")
if country == 'United States':
sql = sql.replace("(Location)", "Country_Region = 'US' ")
else:
sql = sql.replace("(Location)", "Country_Region = \"" + country + "\" ")
bucket.update({'sql': sql})
if real_question == "What is the (Rate Entity) in (State Entity) in (Day), (Month)?":
sql = "Select (Rate) from db1_state_given_date where (Location)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if rat == 'incidence rate':
sql = sql.replace("(Rate)", 'SUM(Confirmed)/SUM(Confirmed*100000/Incidence_Rate) * 100000')
elif rat == 'case-fatality rate':
sql = sql.replace("(Rate)", 'SUM(Deaths)*100.0/SUM(Confirmed)')
elif rat == 'recovery rate':
sql = sql.replace("(Rate)", 'SUM(Recovered)*100.0./SUM(Confirmed)')
else:
sql = sql.replace("(Rate)", 'Testing_Rate')
sql = sql.replace("(Location)", "Province_State = \"" + state + "\"")
bucket.update({'sql': sql})
if real_question == "Give me the number of (Rate Entity) in (County Entity), (State Entity) in (Day), (Month).":
sql = "Select (Rate) from db1_given_date where (Location)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if rat == 'incidence rate':
sql = sql.replace("(Rate)", 'SUM(Confirmed)/SUM(Confirmed*100000/Incidence_Rate) * 100000')
elif rat == 'case-fatality rate':
sql = sql.replace("(Rate)", 'SUM(Deaths)*100.0/SUM(Confirmed)')
elif rat == 'recovery rate':
sql = sql.replace("(Rate)", 'SUM(Recovered)*100.0./SUM(Confirmed)')
else:
sql = sql.replace("(Rate)", 'Testing_Rate')
sql = sql.replace("(Location)", "Province_State = \"" + state_name + "\" and Admin2 = \"" + county_name + "\" ")
bucket.update({'sql': sql})
if real_question == "Provide me with the number of (Rate Entity) in (Province Entity), (Country Entity) in (Day), (Month).":
sql = "Select (Rate) from db1_given_date where (Location)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if rat == 'incidence rate':
sql = sql.replace("(Rate)", 'SUM(Confirmed)/SUM(Confirmed*100000/Incidence_Rate) * 100000')
elif rat == 'case-fatality rate':
sql = sql.replace("(Rate)", 'SUM(Deaths)*100.0/SUM(Confirmed)')
elif rat == 'recovery rate':
sql = sql.replace("(Rate)", 'SUM(Recovered)*100.0./SUM(Confirmed)')
else:
sql = sql.replace("(Rate)", 'Testing_Rate')
sql = sql.replace("(Location)",
"Province_State = \"" + province_name + "\" and Country_Region = \"" + country_name + "\" ")
bucket.update({'sql': sql})
if real_question == "List the number of (Rate Entity) in (Country Entity) in (Day), (Month).":
sql = "Select (Rate) from db1_given_date where (Location)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if rat == 'incidence rate':
sql = sql.replace("(Rate)", 'SUM(Confirmed)/SUM(Confirmed*100000/Incidence_Rate) * 100000')
elif rat == 'case-fatality rate':
sql = sql.replace("(Rate)", 'SUM(Deaths)*100.0/SUM(Confirmed)')
elif rat == 'recovery rate':
sql = sql.replace("(Rate)", 'SUM(Recovered)*100.0./SUM(Confirmed)')
else:
sql = sql.replace("(Rate)", 'Testing_Rate')
if country == 'United States':
sql = sql.replace("(Location)", "Country_Region = 'US' ")
else:
sql = sql.replace("(Location)", "Country_Region = \"" + country + "\" ")
bucket.update({'sql': sql})
if real_question == "What State has the (Value Entity) number of (Case Entity) in (Day), (Month)?":
sql = "Select Province_State from db1_state_given_date where Province_State is not null Group by Province_State order by SUM(Cases) (Value Entity)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if cas == 'confirmed cases' or cas == 'cases' or cas == 'cases increased' or cas == 'confirmed cases increased':
sql = sql.replace("Cases", "Confirmed")
elif cas == 'daily cases' or cas == 'new cases':
sql = sql.replace("Cases", 'Confirmed')
elif cas == 'deaths' or cas == 'deaths increased':
sql = sql.replace("Cases", "Deaths")
elif cas == 'active cases' or cas == 'active cases increased':
sql = sql.replace("Cases", "Active")
elif cas == 'recovered cases' or cas == 'recovered cases increased':
sql = sql.replace("Cases", "Recovered")
if val == 'highest' or val == 'most':
sql = sql.replace('(Value Entity)', 'desc limit 0' + ', 1')
else:
sql = sql.replace('(Value Entity)', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "Give me the State that has the (Value Entity) number of (Case Entity) in (Day), (Month).":
sql = "Select Province_State from db1_state_given_date where Province_State is not null Group by Province_State order by SUM(Cases) (Value Entity)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if cas == 'confirmed cases' or cas == 'cases' or cas == 'cases increased' or cas == 'confirmed cases increased':
sql = sql.replace("Cases", "Confirmed")
elif cas == 'daily cases' or cas == 'new cases':
sql = sql.replace("Cases", 'Confirmed')
elif cas == 'deaths' or cas == 'deaths increased':
sql = sql.replace("Cases", "Deaths")
elif cas == 'active cases' or cas == 'active cases increased':
sql = sql.replace("Cases", "Active")
elif cas == 'recovered cases' or cas == 'recovered cases increased':
sql = sql.replace("Cases", "Recovered")
if val == 'highest' or val == 'most':
sql = sql.replace('(Value Entity)', 'desc limit 0' + ', 1')
else:
sql = sql.replace('(Value Entity)', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "Provide me with the State that has the (Value Entity) number of (Case Entity) in (Day), (Month).":
sql = "Select Province_State from db1_state_given_date where Province_State is not null Group by Province_State order by SUM(Cases) (Value Entity)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if cas == 'confirmed cases' or cas == 'cases' or cas == 'cases increased' or cas == 'confirmed cases increased':
sql = sql.replace("Cases", "Confirmed")
elif cas == 'daily cases' or cas == 'new cases':
sql = sql.replace("Cases", 'Confirmed')
elif cas == 'deaths' or cas == 'deaths increased':
sql = sql.replace("Cases", "Deaths")
elif cas == 'active cases' or cas == 'active cases increased':
sql = sql.replace("Cases", "Active")
elif cas == 'recovered cases' or cas == 'recovered cases increased':
sql = sql.replace("Cases", "Recovered")
if val == 'highest' or val == 'most':
sql = sql.replace('(Value Entity)', 'desc limit 0' + ', 1')
else:
sql = sql.replace('(Value Entity)', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "List the State that has the (Value Entity) number of (Case Entity) in (Day), (Month).":
sql = "Select Province_State from db1_state_given_date where Province_State is not null Group by Province_State order by SUM(Cases) (Value Entity)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if cas == 'confirmed cases' or cas == 'cases' or cas == 'cases increased' or cas == 'confirmed cases increased':
sql = sql.replace("Cases", "Confirmed")
elif cas == 'daily cases' or cas == 'new cases':
sql = sql.replace("Cases", 'Confirmed')
elif cas == 'deaths' or cas == 'deaths increased':
sql = sql.replace("Cases", "Deaths")
elif cas == 'active cases' or cas == 'active cases increased':
sql = sql.replace("Cases", "Active")
elif cas == 'recovered cases' or cas == 'recovered cases increased':
sql = sql.replace("Cases", "Recovered")
if val == 'highest' or val == 'most':
sql = sql.replace('(Value Entity)', 'desc limit 0' + ', 1')
else:
sql = sql.replace('(Value Entity)', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "Which state has the (Value Entity) (Rate Entity) in (Day), (Month)?":
sql = "Select Province_State from db1_state_given_date where Province_State is not null group by Province_State having (Rate) is not null order by (Rate) (Value Entity)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if rat == 'incidence rate':
sql = sql.replace("(Rate)", 'SUM(Confirmed)/SUM(Confirmed*100000/Incidence_Rate) * 100000')
elif rat == 'case-fatality rate':
sql = sql.replace("(Rate)", 'SUM(Deaths)*100.0/SUM(Confirmed)')
elif rat == 'recovery rate':
sql = sql.replace("(Rate)", 'SUM(Recovered)*100.0./SUM(Confirmed)')
else:
sql = sql.replace("(Rate)", 'Testing_Rate')
if val == 'highest' or val == 'most':
sql = sql.replace('(Value Entity)', 'desc limit 0' + ', 1')
else:
sql = sql.replace('(Value Entity)', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "Give me the county that has the (Value Entity) (Rate Entity) in (Day), (Month).":
sql = "Select Admin2, Province_State from db1_given_date where Admin2 is not null and Province_State is not null group by Admin2, Province_State having (Rate) is not null order by (Rate) (Value Entity)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if rat == 'incidence rate':
sql = sql.replace("(Rate)", 'SUM(Confirmed)/SUM(Confirmed*100000/Incidence_Rate) * 100000')
elif rat == 'case-fatality rate':
sql = sql.replace("(Rate)", 'SUM(Deaths)*100.0/SUM(Confirmed)')
elif rat == 'recovery rate':
sql = sql.replace("(Rate)", 'SUM(Recovered)*100.0./SUM(Confirmed)')
else:
sql = sql.replace("(Rate)", 'Testing_Rate')
if val == 'highest' or val == 'most':
sql = sql.replace('(Value Entity)', 'desc limit 0' + ', 1')
else:
sql = sql.replace('(Value Entity)', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "Provide me with the province that has the (Value Entity) (Rate Entity) in (Day), (Month).":
sql = "Select Province_State, Country_Region from db1_given_date where Country_Region != 'US' and Province_State is not null and Country_Region is not null group by Province_State, Country_Region having (Rate) is not null order by (Rate) (Value Entity)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if rat == 'incidence rate':
sql = sql.replace("(Rate)", 'SUM(Confirmed)/SUM(Confirmed*100000/Incidence_Rate) * 100000')
elif rat == 'case-fatality rate':
sql = sql.replace("(Rate)", 'SUM(Deaths)*100.0/SUM(Confirmed)')
elif rat == 'recovery rate':
sql = sql.replace("(Rate)", 'SUM(Recovered)*100.0./SUM(Confirmed)')
else:
sql = sql.replace("(Rate)", 'Testing_Rate')
if val == 'highest' or val == 'most':
sql = sql.replace('(Value Entity)', 'desc limit 0' + ', 1')
else:
sql = sql.replace('(Value Entity)', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "List the country that has the (Value Entity) (Rate Entity) in (Day), (Month).":
sql = "Select Country_Region from db1_given_date where Country_Region is not null group by Country_Region having (Rate) is not null order by (Rate) (Value Entity)"
given_date = dateconvert1(day, mon)
sql = sql.replace("given_date", given_date)
if rat == 'incidence rate':
sql = sql.replace("(Rate)", 'SUM(Confirmed)/SUM(Confirmed*100000/Incidence_Rate) * 100000')
elif rat == 'case-fatality rate':
sql = sql.replace("(Rate)", 'SUM(Deaths)*100.0/SUM(Confirmed)')
elif rat == 'recovery rate':
sql = sql.replace("(Rate)", 'SUM(Recovered)*100.0./SUM(Confirmed)')
else:
sql = sql.replace("(Rate)", 'Testing_Rate')
if val == 'highest' or val == 'most':
sql = sql.replace('(Value Entity)', 'desc limit 0' + ', 1')
else:
sql = sql.replace('(Value Entity)', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "What is the (Rate Entity) in (State Entity)?":
sql = "Select Rate Entity Column from db2State where date = 'current date' and state = \"State Entity\" "
if rat == 'daily percent positive rate':
sql = sql.replace("Rate Entity Column", "positiveIncrease * 100.0 /totalTestResultsIncrease")
elif rat == 'daily percent negative rate':
sql = sql.replace("Rate Entity Column", "negativeIncrease * 100.0/totalTestResultsIncrease")
elif rat == 'percent positive rate':
sql = sql.replace("Rate Entity Column", "positive * 100.0/totalTestResults")
elif rat == 'percent negative rate':
sql = sql.replace("Rate Entity Column", "negative * 100.0/totalTestResults")
else:
sql = sql.replace("Rate Entity Column", "hospitalizedCumulative * 100.0/positive")
sql = sql.replace('current date', today)
state_abbreviation = state_key[state_val.index(state)]
sql = sql.replace("State Entity", state_abbreviation)
bucket.update({'sql': sql})
if real_question == "Give me the (Rate Entity) in (State Entity).":
sql = "Select Rate Entity Column from db2State where date = 'current date' and state = \"State Entity\" "
if rat == 'daily percent positive rate':
sql = sql.replace("Rate Entity Column", "positiveIncrease * 100.0 /totalTestResultsIncrease")
elif rat == 'daily percent negative rate':
sql = sql.replace("Rate Entity Column", "negativeIncrease * 100.0/totalTestResultsIncrease")
elif rat == 'percent positive rate':
sql = sql.replace("Rate Entity Column", "positive * 100.0/totalTestResults")
elif rat == 'percent negative rate':
sql = sql.replace("Rate Entity Column", "negative * 100.0/totalTestResults")
else:
sql = sql.replace("Rate Entity Column", "hospitalizedCumulative * 100.0/positive")
sql = sql.replace('current date', today)
state_abbreviation = state_key[state_val.index(state)]
sql = sql.replace("State Entity", state_abbreviation)
bucket.update({'sql': sql})
if real_question == "Provide me with the (Rate Entity) in (State Entity).":
sql = "Select Rate Entity Column from db2State where date = 'current date' and state = \"State Entity\" "
if rat == 'daily percent positive rate':
sql = sql.replace("Rate Entity Column", "positiveIncrease * 100.0 /totalTestResultsIncrease")
elif rat == 'daily percent negative rate':
sql = sql.replace("Rate Entity Column", "negativeIncrease * 100.0/totalTestResultsIncrease")
elif rat == 'percent positive rate':
sql = sql.replace("Rate Entity Column", "positive * 100.0/totalTestResults")
elif rat == 'percent negative rate':
sql = sql.replace("Rate Entity Column", "negative * 100.0/totalTestResults")
else:
sql = sql.replace("Rate Entity Column", "hospitalizedCumulative * 100.0/positive")
sql = sql.replace('current date', today)
state_abbreviation = state_key[state_val.index(state)]
sql = sql.replace("State Entity", state_abbreviation)
bucket.update({'sql': sql})
if real_question == "List the (Rate Entity) in (State Entity).":
sql = "Select Rate Entity Column from db2State where date = 'current date' and state = \"State Entity\" "
if rat == 'daily percent positive rate':
sql = sql.replace("Rate Entity Column", "positiveIncrease * 100.0 /totalTestResultsIncrease")
elif rat == 'daily percent negative rate':
sql = sql.replace("Rate Entity Column", "negativeIncrease * 100.0/totalTestResultsIncrease")
elif rat == 'percent positive rate':
sql = sql.replace("Rate Entity Column", "positive * 100.0/totalTestResults")
elif rat == 'percent negative rate':
sql = sql.replace("Rate Entity Column", "negative * 100.0/totalTestResults")
else:
sql = sql.replace("Rate Entity Column", "hospitalizedCumulative * 100.0/positive")
sql = sql.replace('current date', today)
state_abbreviation = state_key[state_val.index(state)]
sql = sql.replace("State Entity", state_abbreviation)
bucket.update({'sql': sql})
if real_question == "Which state has the (Value Entity) number of people (Hospitalization Entity)?":
sql = "Select state from db2state where date = 'given date' and (Null) order by Hospitalization Entity Column Value Entity"
if hos == 'Currently in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCurrently")
sql = sql.replace("(Null)", "inICUCurrently is not null")
elif hos == 'Cumulatively in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCumulative")
sql = sql.replace("(Null)", "inICUCumulative is not null")
elif hos == 'Currently on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCurrently")
sql = sql.replace("(Null)", "onVentilatorCurrently is not null")
elif hos == 'Cumulatively on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCumulative")
sql = sql.replace("(Null)", "onVentilatorCumulative is not null")
elif hos == 'Cumulatively hospitalized':
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCumulative")
sql = sql.replace("(Null)", "hospitalizedCumulative is not null")
else:
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCurrently")
sql = sql.replace("(Null)", "hospitalizedCurrently is not null")
sql = sql.replace("given date", today)
bucket.update({'sql': sql})
if real_question == "Give me the state with the (Value Entity) number of people (Hospitalization Entity).":
sql = "Select state from db2state where date = 'given date' and (Null) order by Hospitalization Entity Column Value Entity"
if hos == 'Currently in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCurrently")
sql = sql.replace("(Null)", "inICUCurrently is not null")
elif hos == 'Cumulatively in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCumulative")
sql = sql.replace("(Null)", "inICUCumulative is not null")
elif hos == 'Currently on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCurrently")
sql = sql.replace("(Null)", "onVentilatorCurrently is not null")
elif hos == 'Cumulatively on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCumulative")
sql = sql.replace("(Null)", "onVentilatorCumulative is not null")
elif hos == 'Cumulatively hospitalized':
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCumulative")
sql = sql.replace("(Null)", "hospitalizedCumulative is not null")
else:
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCurrently")
sql = sql.replace("(Null)", "hospitalizedCurrently is not null")
sql = sql.replace("given date", today)
bucket.update({'sql': sql})
if real_question == "Provide me with the state with the (Value Entity) number of people (Hospitalization Entity).":
sql = "Select state from db2state where date = 'given date' and (Null) order by Hospitalization Entity Column Value Entity"
if hos == 'Currently in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCurrently")
sql = sql.replace("(Null)", "inICUCurrently is not null")
elif hos == 'Cumulatively in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCumulative")
sql = sql.replace("(Null)", "inICUCumulative is not null")
elif hos == 'Currently on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCurrently")
sql = sql.replace("(Null)", "onVentilatorCurrently is not null")
elif hos == 'Cumulatively on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCumulative")
sql = sql.replace("(Null)", "onVentilatorCumulative is not null")
elif hos == 'Cumulatively hospitalized':
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCumulative")
sql = sql.replace("(Null)", "hospitalizedCumulative is not null")
else:
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCurrently")
sql = sql.replace("(Null)", "hospitalizedCurrently is not null")
sql = sql.replace("given date", today)
bucket.update({'sql': sql})
if real_question == "List the state with the (Value Entity) number of people (Hospitalization Entity).":
sql = "Select state from db2state where date = 'given date' and (Null) order by Hospitalization Entity Column Value Entity"
if hos == 'Currently in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCurrently")
sql = sql.replace("(Null)", "inICUCurrently is not null")
elif hos == 'Cumulatively in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCumulative")
sql = sql.replace("(Null)", "inICUCumulative is not null")
elif hos == 'Currently on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCurrently")
sql = sql.replace("(Null)", "onVentilatorCurrently is not null")
elif hos == 'Cumulatively on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCumulative")
sql = sql.replace("(Null)", "onVentilatorCumulative is not null")
elif hos == 'Cumulatively hospitalized':
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCumulative")
sql = sql.replace("(Null)", "hospitalizedCumulative is not null")
else:
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCurrently")
sql = sql.replace("(Null)", "hospitalizedCurrently is not null")
sql = sql.replace("given date", today)
bucket.update({'sql': sql})
if real_question == "How many people are (Hospitalization Entity) in (State Entity)?":
sql = "Select Hospitalization Entity Column from db2State where date = 'current date' and state = \"State Entity\" "
if hos == 'Currently in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCurrently")
sql = sql.replace("(Null)", "inICUCurrently is not null")
elif hos == 'Cumulatively in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCumulative")
sql = sql.replace("(Null)", "inICUCumulative is not null")
elif hos == 'Currently on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCurrently")
sql = sql.replace("(Null)", "onVentilatorCurrently is not null")
elif hos == 'Cumulatively on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCumulative")
sql = sql.replace("(Null)", "onVentilatorCumulative is not null")
elif hos == 'Cumulatively hospitalized':
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCumulative")
sql = sql.replace("(Null)", "hospitalizedCumulative is not null")
else:
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCurrently")
sql = sql.replace("(Null)", "hospitalizedCurrently is not null")
sql = sql.replace("current date", today)
state_abbreviation = state_key[state_val.index(state)]
sql = sql.replace("State Entity", state_abbreviation)
bucket.update({'sql': sql})
if real_question == "Give me the number of people who are (Hospitalization Entity) in (State Entity).":
sql = "Select Hospitalization Entity Column from db2State where date = 'current date' and state = \"State Entity\" "
if hos == 'Currently in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCurrently")
sql = sql.replace("(Null)", "inICUCurrently is not null")
elif hos == 'Cumulatively in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCumulative")
sql = sql.replace("(Null)", "inICUCumulative is not null")
elif hos == 'Currently on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCurrently")
sql = sql.replace("(Null)", "onVentilatorCurrently is not null")
elif hos == 'Cumulatively on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCumulative")
sql = sql.replace("(Null)", "onVentilatorCumulative is not null")
elif hos == 'Cumulatively hospitalized':
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCumulative")
sql = sql.replace("(Null)", "hospitalizedCumulative is not null")
else:
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCurrently")
sql = sql.replace("(Null)", "hospitalizedCurrently is not null")
sql = sql.replace("current date", today)
state_abbreviation = state_key[state_val.index(state)]
sql = sql.replace("State Entity", state_abbreviation)
bucket.update({'sql': sql})
if real_question == "Provide me with the number of people who are (Hospitalization Entity) in (State Entity).":
sql = "Select Hospitalization Entity Column from db2State where date = 'current date' and state = \"State Entity\" "
if hos == 'Currently in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCurrently")
sql = sql.replace("(Null)", "inICUCurrently is not null")
elif hos == 'Cumulatively in ICU':
sql = sql.replace("Hospitalization Entity Column", "inICUCumulative")
sql = sql.replace("(Null)", "inICUCumulative is not null")
elif hos == 'Currently on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCurrently")
sql = sql.replace("(Null)", "onVentilatorCurrently is not null")
elif hos == 'Cumulatively on ventilators':
sql = sql.replace("Hospitalization Entity Column", "onVentilatorCumulative")
sql = sql.replace("(Null)", "onVentilatorCumulative is not null")
elif hos == 'Cumulatively hospitalized':
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCumulative")
sql = sql.replace("(Null)", "hospitalizedCumulative is not null")
else:
sql = sql.replace("Hospitalization Entity Column", "hospitalizedCurrently")
sql = sql.replace("(Null)", "hospitalizedCurrently is not null")
sql = sql.replace("current date", today)
state_abbreviation = state_key[state_val.index(state)]
sql = sql.replace("State Entity", state_abbreviation)
bucket.update({'sql': sql})
if real_question == "List the number of people who are (Hospitalization Entity) in (State Entity).":
    # Map the recognized hospitalization phrase (hos) onto the matching
    # db2State column, then fill in today's date and the state abbreviation.
    sql = "Select Hospitalization Entity Column from db2State where date = 'current date' and state = \"State Entity\" "
    # NOTE(review): the original also ran sql.replace("(Null)", "... is not null")
    # in every branch below, but this template never contains "(Null)", so those
    # calls were provable no-ops (copy-paste residue from the ranking-query
    # variants) and have been removed. Behavior is unchanged.
    if hos == 'Currently in ICU':
        sql = sql.replace("Hospitalization Entity Column", "inICUCurrently")
    elif hos == 'Cumulatively in ICU':
        sql = sql.replace("Hospitalization Entity Column", "inICUCumulative")
    elif hos == 'Currently on ventilators':
        sql = sql.replace("Hospitalization Entity Column", "onVentilatorCurrently")
    elif hos == 'Cumulatively on ventilators':
        sql = sql.replace("Hospitalization Entity Column", "onVentilatorCumulative")
    elif hos == 'Cumulatively hospitalized':
        sql = sql.replace("Hospitalization Entity Column", "hospitalizedCumulative")
    else:
        # Any unrecognized phrase falls back to "currently hospitalized".
        sql = sql.replace("Hospitalization Entity Column", "hospitalizedCurrently")
    sql = sql.replace("current date", today)
    # state_key/state_val are parallel name/abbreviation lists -- presumably
    # defined earlier in this function; not visible from here.
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "What is the number of (Testing Entity) done in (State Entity) in (Day), (Month)?":
    # Single-day testing count for one state: choose the db2State column for
    # the recognized testing phrase (tes), then fill in the date and state.
    sql = "Select Testing Entity Column from db2State where date = 'Time Entity' and state = \"State Entity\" "
    if tes == 'total tests' or tes == 'total tests increased':
        sql = sql.replace("Testing Entity Column", 'totalTestResults')
    elif tes == 'positive tests' or tes == 'positive tests increased':
        sql = sql.replace("Testing Entity Column", 'positive')
    elif tes == 'negative tests' or tes == 'negative tests increased':
        sql = sql.replace("Testing Entity Column", 'negative')
    else:
        # Fallback column for any other testing phrase.
        sql = sql.replace("Testing Entity Column", 'totalTestResultsIncrease')
    # dateconvert2 presumably formats (day, month) into the DB's date string --
    # defined elsewhere in the file; TODO confirm the exact format.
    given_date = dateconvert2(day, mon)
    sql = sql.replace("Time Entity", given_date)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "Give me the number of (Testing Entity) done in (State Entity) in (Day), (Month).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select Testing Entity Column from db2State where date = 'Time Entity' and state = \"State Entity\" "
    if tes == 'total tests' or tes == 'total tests increased':
        sql = sql.replace("Testing Entity Column", 'totalTestResults')
    elif tes == 'positive tests' or tes == 'positive tests increased':
        sql = sql.replace("Testing Entity Column", 'positive')
    elif tes == 'negative tests' or tes == 'negative tests increased':
        sql = sql.replace("Testing Entity Column", 'negative')
    else:
        sql = sql.replace("Testing Entity Column", 'totalTestResultsIncrease')
    given_date = dateconvert2(day, mon)
    sql = sql.replace("Time Entity", given_date)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "Provide me with the number of (Testing Entity) done in (State Entity) in (Month).":
    # Month total computed as a difference of two cumulative snapshots
    # (value at month end minus value at month start).
    sql = "Select (Select Testing Entity Column from db2State where date = 'Time End' and state = \"State Entity\") - (Select Testing Entity Column from db2State where date = 'Time Start' and state = \"State Entity\")"
    if tes == 'total tests' or tes == 'total tests increased':
        sql = sql.replace("Testing Entity Column", 'totalTestResults')
    elif tes == 'positive tests' or tes == 'positive tests increased':
        sql = sql.replace("Testing Entity Column", 'positive')
    elif tes == 'negative tests' or tes == 'negative tests increased':
        sql = sql.replace("Testing Entity Column", 'negative')
    else:
        # No cumulative column for other phrases: abandon this question
        # entirely by breaking out of the enclosing loop (loop header is
        # outside this view -- NOTE(review): confirm the intended scope).
        break
    # monthconvert2 presumably returns (first, last) date strings of the month.
    start_date, end_date = monthconvert2(mon)
    sql = sql.replace("Time End", end_date)
    sql = sql.replace("Time Start", start_date)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "List the number of (Testing Entity) done in (State Entity) in (Month).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select (Select Testing Entity Column from db2State where date = 'Time End' and state = \"State Entity\") - (Select Testing Entity Column from db2State where date = 'Time Start' and state = \"State Entity\")"
    if tes == 'total tests' or tes == 'total tests increased':
        sql = sql.replace("Testing Entity Column", 'totalTestResults')
    elif tes == 'positive tests' or tes == 'positive tests increased':
        sql = sql.replace("Testing Entity Column", 'positive')
    elif tes == 'negative tests' or tes == 'negative tests increased':
        sql = sql.replace("Testing Entity Column", 'negative')
    else:
        break
    start_date, end_date = monthconvert2(mon)
    sql = sql.replace("Time End", end_date)
    sql = sql.replace("Time Start", start_date)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "Which state has the (Value Entity) (Testing Entity) in (Day), (Month)?":
    # Rank all states on one day by the chosen testing column; "(Null)"
    # becomes a not-null filter so empty rows don't win the ordering.
    sql = "Select state from db2State where date = 'Time Entity' and (Null) order by Testing Entity Column Value Entity"
    if tes == 'total tests' or tes == 'total tests increased':
        sql = sql.replace("Testing Entity Column", 'totalTestResults')
        sql = sql.replace("(Null)", "totalTestResults is not null")
    elif tes == 'positive tests' or tes == 'positive tests increased':
        sql = sql.replace("Testing Entity Column", 'positive')
        sql = sql.replace("(Null)", "positive is not null")
    elif tes == 'negative tests' or tes == 'negative tests increased':
        sql = sql.replace("Testing Entity Column", 'negative')
        sql = sql.replace("(Null)", "negative is not null")
    else:
        sql = sql.replace("Testing Entity Column", 'totalTestResultsIncrease')
        sql = sql.replace("(Null)", "totalTestResultsIncrease is not null")
    # "highest"/"most" -> descending, otherwise ascending; limit to one row.
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
    given_date = dateconvert2(day, mon)
    sql = sql.replace("Time Entity", given_date)
    bucket.update({'sql': sql})
if real_question == "Give me the state with the (Value Entity) (Testing Entity) in (Day), (Month).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select state from db2State where date = 'Time Entity' and (Null) order by Testing Entity Column Value Entity"
    if tes == 'total tests' or tes == 'total tests increased':
        sql = sql.replace("Testing Entity Column", 'totalTestResults')
        sql = sql.replace("(Null)", "totalTestResults is not null")
    elif tes == 'positive tests' or tes == 'positive tests increased':
        sql = sql.replace("Testing Entity Column", 'positive')
        sql = sql.replace("(Null)", "positive is not null")
    elif tes == 'negative tests' or tes == 'negative tests increased':
        sql = sql.replace("Testing Entity Column", 'negative')
        sql = sql.replace("(Null)", "negative is not null")
    else:
        sql = sql.replace("Testing Entity Column", 'totalTestResultsIncrease')
        sql = sql.replace("(Null)", "totalTestResultsIncrease is not null")
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
    given_date = dateconvert2(day, mon)
    sql = sql.replace("Time Entity", given_date)
    bucket.update({'sql': sql})
if real_question == "Provide me with the state with the (Value Entity) (Testing Entity) in (Month).":
    # Rank states by the month-over-month delta: self-join the table at the
    # month-end date (t1) and month-start date (t2), order by t1.col - t2.col.
    sql = "Select t1.state from (Select state, Testing Entity Column from db2State where date = 'Time End' and (Null)) as t1 Inner Join (Select state, Testing Entity Column from db2State where date = 'Time Start' and (Null)) as t2 on t1.state = t2.state order by t1.Testing Entity Column-t2.Testing Entity Column Value Entity"
    if tes == 'total tests' or tes == 'total tests increased':
        sql = sql.replace("Testing Entity Column", 'totalTestResults')
        sql = sql.replace("(Null)", "totalTestResults is not null")
    elif tes == 'positive tests' or tes == 'positive tests increased':
        sql = sql.replace("Testing Entity Column", 'positive')
        sql = sql.replace("(Null)", "positive is not null")
    elif tes == 'negative tests' or tes == 'negative tests increased':
        sql = sql.replace("Testing Entity Column", 'negative')
        sql = sql.replace("(Null)", "negative is not null")
    else:
        sql = sql.replace("Testing Entity Column", 'totalTestResultsIncrease')
        sql = sql.replace("(Null)", "totalTestResultsIncrease is not null")
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
    start_date, end_date = monthconvert2(mon)
    sql = sql.replace("Time End", end_date)
    sql = sql.replace("Time Start", start_date)
    bucket.update({'sql': sql})
if real_question == "List the state with the (Value Entity) (Testing Entity) in (Month).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select t1.state from (Select state, Testing Entity Column from db2State where date = 'Time End' and (Null)) as t1 Inner Join (Select state, Testing Entity Column from db2State where date = 'Time Start' and (Null)) as t2 on t1.state = t2.state order by t1.Testing Entity Column-t2.Testing Entity Column Value Entity"
    if tes == 'total tests' or tes == 'total tests increased':
        sql = sql.replace("Testing Entity Column", 'totalTestResults')
        sql = sql.replace("(Null)", "totalTestResults is not null")
    elif tes == 'positive tests' or tes == 'positive tests increased':
        sql = sql.replace("Testing Entity Column", 'positive')
        sql = sql.replace("(Null)", "positive is not null")
    elif tes == 'negative tests' or tes == 'negative tests increased':
        sql = sql.replace("Testing Entity Column", 'negative')
        sql = sql.replace("(Null)", "negative is not null")
    else:
        sql = sql.replace("Testing Entity Column", 'totalTestResultsIncrease')
        sql = sql.replace("(Null)", "totalTestResultsIncrease is not null")
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
    start_date, end_date = monthconvert2(mon)
    sql = sql.replace("Time End", end_date)
    sql = sql.replace("Time Start", start_date)
    bucket.update({'sql': sql})
if real_question == "What percentage of (Cases) in (State Entity) are (Race)?":
    # Percentage of cases/deaths for one race in one state (today's row of
    # db2race). Columns are named "<Cases|Deaths>_<Race>"; the percentage is
    # built by appending "*100.0/<Cases|Deaths>_Total" to the race column.
    sql = "Select Case Entity Column_Race Entity Column from db2race where date = 'given date' and state = \"State Entity\" "
    # Map the recognized race phrase (rac) to its db2race column suffix.
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        # Default bucket for any unrecognized race phrase.
        race_input = 'Latinx'
    # Phrases containing 'cases' select the Cases_ columns; anything else
    # (e.g. deaths) selects the Deaths_ columns.
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
        race_input = race_input + '*100.0/Cases_Total'
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
        race_input = race_input + '*100.0/Deaths_Total'
    sql = sql.replace("Race Entity Column", race_input)
    sql = sql.replace("given date", today)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "Give me the percentage of (Cases) in (State Entity) that are (Race).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select Case Entity Column_Race Entity Column from db2race where date = 'given date' and state = \"State Entity\" "
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
        race_input = race_input + '*100.0/Cases_Total'
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
        race_input = race_input + '*100.0/Deaths_Total'
    sql = sql.replace("Race Entity Column", race_input)
    sql = sql.replace("given date", today)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "Provide me with the percentage of (Cases) in (State Entity) that are (Race).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select Case Entity Column_Race Entity Column from db2race where date = 'given date' and state = \"State Entity\" "
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
        race_input = race_input + '*100.0/Cases_Total'
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
        race_input = race_input + '*100.0/Deaths_Total'
    sql = sql.replace("Race Entity Column", race_input)
    sql = sql.replace("given date", today)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "List the percentage of (Cases) in (State Entity) that are (Race).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select Case Entity Column_Race Entity Column from db2race where date = 'given date' and state = \"State Entity\" "
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
        race_input = race_input + '*100.0/Cases_Total'
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
        race_input = race_input + '*100.0/Deaths_Total'
    sql = sql.replace("Race Entity Column", race_input)
    sql = sql.replace("given date", today)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "Which state has the (Value Entity) percentage of (Race) (Cases)?":
    # Rank states by one race's share of cases/deaths (today's db2race rows).
    # The "Race Entity Column" placeholder appears twice (filter + order-by),
    # so a single replace fills both with "<Race>*100.0/<Cases|Deaths>_Total".
    sql = "Select state from db2race where date = 'given date' and Case Entity Column_Race Entity Column is not null order by Case Entity Column_Race Entity Column Value Entity"
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    sql = sql.replace("given date", today)
    # "highest"/"most" -> descending, otherwise ascending; limit to one row.
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
        race_input = race_input + '*100.0/Cases_Total'
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
        race_input = race_input + '*100.0/Deaths_Total'
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
if real_question == "Give me the state with the (Value Entity) percentage of (Race) (Cases).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select state from db2race where date = 'given date' and Case Entity Column_Race Entity Column is not null order by Case Entity Column_Race Entity Column Value Entity"
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    sql = sql.replace("given date", today)
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
        race_input = race_input + '*100.0/Cases_Total'
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
        race_input = race_input + '*100.0/Deaths_Total'
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
if real_question == "Provide me with the state with the (Value Entity) percentage of (Race) (Cases).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select state from db2race where date = 'given date' and Case Entity Column_Race Entity Column is not null order by Case Entity Column_Race Entity Column Value Entity"
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    sql = sql.replace("given date", today)
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
        race_input = race_input + '*100.0/Cases_Total'
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
        race_input = race_input + '*100.0/Deaths_Total'
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
if real_question == "List the state with the (Value Entity) percentage of (Race) (Cases).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select state from db2race where date = 'given date' and Case Entity Column_Race Entity Column is not null order by Case Entity Column_Race Entity Column Value Entity"
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    sql = sql.replace("given date", today)
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
        race_input = race_input + '*100.0/Cases_Total'
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
        race_input = race_input + '*100.0/Deaths_Total'
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
if real_question == "How many (Race) (Cases) occurred in (State Entity) in (Day), (Month)?":
    # Raw count (not percentage) of one race's cases/deaths in one state on
    # one day: selects the "<Cases|Deaths>_<Race>" column from db2race.
    sql = "Select Case Entity Column_Race Entity Column from db2race where date = 'Time Entity' and state = \"State Entity\" "
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
    given_date = dateconvert2(day, mon)
    sql = sql.replace("Time Entity", given_date)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    # Map the recognized race phrase (rac) to its db2race column suffix;
    # unrecognized phrases default to Latinx.
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
if real_question == "Give me the number of (Race) (Cases) occurred in (State Entity) in (Day), (Month).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select Case Entity Column_Race Entity Column from db2race where date = 'Time Entity' and state = \"State Entity\" "
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
    given_date = dateconvert2(day, mon)
    sql = sql.replace("Time Entity", given_date)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
if real_question == "Provide me with the number of (Race) (Cases) occurred in (State Entity) in (Month).":
    # Month total of one race's cases/deaths, as the difference of two
    # cumulative db2race snapshots (month end minus month start).
    sql = "Select(Select Case Entity Column_Race Entity Column from db2race where date = 'Time End' and state = \"State Entity\") - (Select Case Entity Column_Race Entity Column from db2race where date = 'Time Start' and state = \"State Entity\") "
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
    start_date, end_date = monthconvert2(mon)
    sql = sql.replace("Time End", end_date)
    sql = sql.replace("Time Start", start_date)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
if real_question == "List the number of (Race) (Cases) occurred in (State Entity) in (Month).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select(Select Case Entity Column_Race Entity Column from db2race where date = 'Time End' and state = \"State Entity\") - (Select Case Entity Column_Race Entity Column from db2race where date = 'Time Start' and state = \"State Entity\") "
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
    start_date, end_date = monthconvert2(mon)
    sql = sql.replace("Time End", end_date)
    sql = sql.replace("Time Start", start_date)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    if rac == 'African-American' or rac == 'Black':
        race_input = 'Black'
    elif rac == 'Asian':
        race_input = 'Asian'
    elif rac == 'White' or rac == 'Caucasian':
        race_input = 'White'
    elif rac == 'American Indian' or rac == 'Alaska Native' or rac == 'American Indian or Alaska Native':
        race_input = 'AIAN'
    elif rac == 'Pacific Islander' or rac == 'Native Hawaiian' or rac == 'Pacific Islander and Native Hawaiian':
        race_input = 'NHPI'
    elif rac == 'multiracial' or rac == 'mixed':
        race_input = 'Multiracial'
    else:
        race_input = 'Latinx'
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
if real_question == "Which state has the (Value Entity) (Rate Entity)?":
    # Rank states on today's date by the requested derived rate (rat); the
    # "(Null)" placeholder becomes a not-null filter on both operand columns
    # so division over missing data never wins the ordering.
    # FIX: table name normalized to db2State -- every other query in this
    # function uses db2State, and the original's lowercase db2state would
    # miss the table on case-sensitive backends (e.g. MySQL on Linux).
    sql = "Select state from db2State where date = 'current date' and (Null) order by Rate Entity Column Value Entity"
    if rat == 'daily percent positive rate':
        sql = sql.replace("Rate Entity Column", "positiveIncrease * 100.0/totalTestResultsIncrease")
        sql = sql.replace("(Null)", "positiveIncrease is not null and totalTestResultsIncrease is not null")
    elif rat == 'daily percent negative rate':
        sql = sql.replace("Rate Entity Column", "negativeIncrease *100.0/totalTestResultsIncrease")
        sql = sql.replace("(Null)", "negativeIncrease is not null and totalTestResultsIncrease is not null")
    elif rat == 'percent positive rate':
        sql = sql.replace("Rate Entity Column", "positive *100.0/totalTestResults")
        sql = sql.replace("(Null)", "positive is not null and totalTestResults is not null")
    elif rat == 'percent negative rate':
        sql = sql.replace("Rate Entity Column", "negative * 100.0/totalTestResults")
        sql = sql.replace("(Null)", "negative is not null and totalTestResults is not null")
    else:
        # Fallback: hospitalization rate among positives.
        sql = sql.replace("Rate Entity Column", "hospitalizedCumulative * 100.0/positive")
        sql = sql.replace("(Null)", "hospitalizedCumulative is not null and positive is not null")
    # "highest"/"most" -> descending, otherwise ascending; limit to one row.
    # (Constant concatenation 'desc limit 0' + ', 1' folded to one literal.)
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0, 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0, 1')
    sql = sql.replace("current date", today)
    bucket.update({'sql': sql})
if real_question == "Give me the state with the (Value Entity) (Rate Entity).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select state from db2State where date = 'current date' and (Null) order by Rate Entity Column Value Entity"
    if rat == 'daily percent positive rate':
        sql = sql.replace("Rate Entity Column", "positiveIncrease * 100.0/totalTestResultsIncrease")
        sql = sql.replace("(Null)", "positiveIncrease is not null and totalTestResultsIncrease is not null")
    elif rat == 'daily percent negative rate':
        sql = sql.replace("Rate Entity Column", "negativeIncrease *100.0/totalTestResultsIncrease")
        sql = sql.replace("(Null)", "negativeIncrease is not null and totalTestResultsIncrease is not null")
    elif rat == 'percent positive rate':
        sql = sql.replace("Rate Entity Column", "positive *100.0/totalTestResults")
        sql = sql.replace("(Null)", "positive is not null and totalTestResults is not null")
    elif rat == 'percent negative rate':
        sql = sql.replace("Rate Entity Column", "negative * 100.0/totalTestResults")
        sql = sql.replace("(Null)", "negative is not null and totalTestResults is not null")
    else:
        sql = sql.replace("Rate Entity Column", "hospitalizedCumulative * 100.0/positive")
        sql = sql.replace("(Null)", "hospitalizedCumulative is not null and positive is not null")
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0, 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0, 1')
    sql = sql.replace("current date", today)
    bucket.update({'sql': sql})
if real_question == "Provide me with the state with the (Value Entity) (Rate Entity).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select state from db2State where date = 'current date' and (Null) order by Rate Entity Column Value Entity"
    if rat == 'daily percent positive rate':
        sql = sql.replace("Rate Entity Column", "positiveIncrease * 100.0/totalTestResultsIncrease")
        sql = sql.replace("(Null)", "positiveIncrease is not null and totalTestResultsIncrease is not null")
    elif rat == 'daily percent negative rate':
        sql = sql.replace("Rate Entity Column", "negativeIncrease *100.0/totalTestResultsIncrease")
        sql = sql.replace("(Null)", "negativeIncrease is not null and totalTestResultsIncrease is not null")
    elif rat == 'percent positive rate':
        sql = sql.replace("Rate Entity Column", "positive *100.0/totalTestResults")
        sql = sql.replace("(Null)", "positive is not null and totalTestResults is not null")
    elif rat == 'percent negative rate':
        sql = sql.replace("Rate Entity Column", "negative * 100.0/totalTestResults")
        sql = sql.replace("(Null)", "negative is not null and totalTestResults is not null")
    else:
        sql = sql.replace("Rate Entity Column", "hospitalizedCumulative * 100.0/positive")
        sql = sql.replace("(Null)", "hospitalizedCumulative is not null and positive is not null")
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0, 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0, 1')
    sql = sql.replace("current date", today)
    bucket.update({'sql': sql})
if real_question == "List the state with the (Value Entity) (Rate Entity).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select state from db2State where date = 'current date' and (Null) order by Rate Entity Column Value Entity"
    if rat == 'daily percent positive rate':
        sql = sql.replace("Rate Entity Column", "positiveIncrease * 100.0/totalTestResultsIncrease")
        sql = sql.replace("(Null)", "positiveIncrease is not null and totalTestResultsIncrease is not null")
    elif rat == 'daily percent negative rate':
        sql = sql.replace("Rate Entity Column", "negativeIncrease *100.0/totalTestResultsIncrease")
        sql = sql.replace("(Null)", "negativeIncrease is not null and totalTestResultsIncrease is not null")
    elif rat == 'percent positive rate':
        sql = sql.replace("Rate Entity Column", "positive *100.0/totalTestResults")
        sql = sql.replace("(Null)", "positive is not null and totalTestResults is not null")
    elif rat == 'percent negative rate':
        sql = sql.replace("Rate Entity Column", "negative * 100.0/totalTestResults")
        sql = sql.replace("(Null)", "negative is not null and totalTestResults is not null")
    else:
        sql = sql.replace("Rate Entity Column", "hospitalizedCumulative * 100.0/positive")
        sql = sql.replace("(Null)", "hospitalizedCumulative is not null and positive is not null")
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0, 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0, 1')
    sql = sql.replace("current date", today)
    bucket.update({'sql': sql})
if real_question == "What is the racial breakdown of (Cases) in (State Entity)?":
    # Full per-race breakdown of today's cases/deaths for one state: one
    # replace of "Case Entity Column" fills the prefix of all nine columns.
    # NOTE(review): this template spells "_LatinX" while the other branches
    # build "Latinx" -- harmless where SQL identifiers are case-insensitive,
    # but worth confirming against the actual schema.
    sql = "Select Case Entity Column_Total,Case Entity Column_White, Case Entity Column_Black,Case Entity Column_LatinX, Case Entity Column_Asian,Case Entity Column_NHPI, Case Entity Column_Multiracial, Case Entity Column_Other, Case Entity Column_Unknown from db2race where date = 'given date' and state = \"State Entity\" "
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
    sql = sql.replace("given date", today)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "Give me the racial breakdown of (Cases) in (State Entity).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select Case Entity Column_Total,Case Entity Column_White, Case Entity Column_Black,Case Entity Column_LatinX, Case Entity Column_Asian,Case Entity Column_NHPI, Case Entity Column_Multiracial, Case Entity Column_Other, Case Entity Column_Unknown from db2race where date = 'given date' and state = \"State Entity\" "
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
    sql = sql.replace("given date", today)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "Provide me with the racial breakdown of (Cases) in (State Entity).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select Case Entity Column_Total,Case Entity Column_White, Case Entity Column_Black,Case Entity Column_LatinX, Case Entity Column_Asian,Case Entity Column_NHPI, Case Entity Column_Multiracial, Case Entity Column_Other, Case Entity Column_Unknown from db2race where date = 'given date' and state = \"State Entity\" "
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
    sql = sql.replace("given date", today)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "List the racial breakdown of (Cases) in (State Entity).":
    # Identical query to the branch above; alternate question phrasing.
    sql = "Select Case Entity Column_Total,Case Entity Column_White, Case Entity Column_Black,Case Entity Column_LatinX, Case Entity Column_Asian,Case Entity Column_NHPI, Case Entity Column_Multiracial, Case Entity Column_Other, Case Entity Column_Unknown from db2race where date = 'given date' and state = \"State Entity\" "
    if cas.find('cases') >= 0:
        sql = sql.replace("Case Entity Column", "Cases")
    else:
        sql = sql.replace("Case Entity Column", "Deaths")
    sql = sql.replace("given date", today)
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    bucket.update({'sql': sql})
if real_question == "What is the number of (Bed Entity) in (State Entity)?":
sql = "Select Sum(Bed Entity Column) From db3 Where STATE_Name = \"State Entity\""
if bed == 'staffed beds':
sql = sql.replace("Bed Entity Column", "NUM_STAFFED_BEDS")
elif bed == 'licensed beds':
sql = sql.replace("Bed Entity Column", "NUM_LICENSED_BEDS")
elif bed == 'ICU beds':
sql = sql.replace("Bed Entity Column", "NUM_ICU_BEDS")
sql = sql.replace("State Entity", state)
bucket.update({'sql': sql})
if real_question == "Give me the number of (Bed Entity) in (State Entity).":
sql = "Select Sum(Bed Entity Column) From db3 Where STATE_Name = \"State Entity\""
if bed == 'staffed beds':
sql = sql.replace("Bed Entity Column", "NUM_STAFFED_BEDS")
elif bed == 'licensed beds':
sql = sql.replace("Bed Entity Column", "NUM_LICENSED_BEDS")
elif bed == 'ICU beds':
sql = sql.replace("Bed Entity Column", "NUM_ICU_BEDS")
sql = sql.replace("State Entity", state)
bucket.update({'sql': sql})
if real_question == "Provide me with the number of (Bed Entity) in (County Entity), (State Entity).":
sql = "Select SUM(Bed Entity Column) from db3 where STATE_Name = \"State Entity\" and COUNTY_Name = \"County Entity\""
if bed == 'staffed beds':
sql = sql.replace("Bed Entity Column", "NUM_STAFFED_BEDS")
elif bed == 'licensed beds':
sql = sql.replace("Bed Entity Column", "NUM_LICENSED_BEDS")
elif bed == 'ICU beds':
sql = sql.replace("Bed Entity Column", "NUM_ICU_BEDS")
sql = sql.replace("County Entity", county_name)
sql = sql.replace("State Entity", state_name)
bucket.update({'sql': sql})
if real_question == "List the number of (Bed Entity) in (County Entity), (State Entity).":
sql = "Select SUM(Bed Entity Column) from db3 where STATE_Name = \"State Entity\" and COUNTY_Name = \"County Entity\""
if bed == 'staffed beds':
sql = sql.replace("Bed Entity Column", "NUM_STAFFED_BEDS")
elif bed == 'licensed beds':
sql = sql.replace("Bed Entity Column", "NUM_LICENSED_BEDS")
elif bed == 'ICU beds':
sql = sql.replace("Bed Entity Column", "NUM_ICU_BEDS")
sql = sql.replace("County Entity", county_name)
sql = sql.replace("State Entity", state_name)
bucket.update({'sql': sql})
if real_question == "What is the breakdown of (Cases) by (Demographic Entity) in the United States?":
sql = "Select * from table_name"
if cas == 'cases' or cas == 'confirmed cases':
if dem == 'sex' or dem == 'gender':
table_name = 'db4casesex'
elif dem == 'race' or dem == 'race and ethnicity' or dem == 'ethnicity':
table_name = 'db4caserace'
else:
table_name = 'db4caseage'
else:
if dem == 'sex' or dem == 'gender':
table_name = 'db4deathsex'
elif dem == 'race' or dem == 'race and ethnicity' or dem == 'ethnicity':
table_name = 'db4deathrace'
else:
table_name = 'db4deathage'
sql = sql.replace("table_name", table_name)
bucket.update({'sql': sql})
if real_question == "Give me the breakdown of (Cases) by (Demographic Entity) in the United States.":
sql = "Select * from table_name"
if cas == 'cases' or cas == 'confirmed cases':
if dem == 'sex' or dem == 'gender':
table_name = 'db4casesex'
elif dem == 'race' or dem == 'race and ethnicity' or dem == 'ethnicity':
table_name = 'db4caserace'
else:
table_name = 'db4caseage'
else:
if dem == 'sex' or dem == 'gender':
table_name = 'db4deathsex'
elif dem == 'race' or dem == 'race and ethnicity' or dem == 'ethnicity':
table_name = 'db4deathrace'
else:
table_name = 'db4deathage'
sql = sql.replace("table_name", table_name)
bucket.update({'sql': sql})
if real_question == "Provide me with the breakdown of (Cases) by (Demographic Entity) in the United States.":
sql = "Select * from table_name"
if cas == 'cases' or cas == 'confirmed cases':
if dem == 'sex' or dem == 'gender':
table_name = 'db4casesex'
elif dem == 'race' or dem == 'race and ethnicity' or dem == 'ethnicity':
table_name = 'db4caserace'
else:
table_name = 'db4caseage'
else:
if dem == 'sex' or dem == 'gender':
table_name = 'db4deathsex'
elif dem == 'race' or dem == 'race and ethnicity' or dem == 'ethnicity':
table_name = 'db4deathrace'
else:
table_name = 'db4deathage'
sql = sql.replace("table_name", table_name)
bucket.update({'sql': sql})
if real_question == "List the breakdown of (Cases) by (Demographic Entity) in the United States.":
sql = "Select * from table_name"
if cas == 'cases' or cas == 'confirmed cases':
if dem == 'sex' or dem == 'gender':
table_name = 'db4casesex'
elif dem == 'race' or dem == 'race and ethnicity' or dem == 'ethnicity':
table_name = 'db4caserace'
else:
table_name = 'db4caseage'
else:
if dem == 'sex' or dem == 'gender':
table_name = 'db4deathsex'
elif dem == 'race' or dem == 'race and ethnicity' or dem == 'ethnicity':
table_name = 'db4deathrace'
else:
table_name = 'db4deathage'
sql = sql.replace("table_name", table_name)
bucket.update({'sql': sql})
if real_question == "Which (Demographic Entity) has the (Value Entity) (Amount Entity) (Cases) in the United States?":
sql = "Select Demographic Entity, Amount Entity from table name Order by Amount Entity Value Entity"
if cas == 'confirmed cases' or cas == 'cases':
if dem == 'sex' or dem == 'gender':
sql = sql.replace("Demographic Entity", "Sex")
sql = sql.replace("table name", "db4casesex")
elif dem == 'age' or dem == 'age group':
sql = sql.replace("Demographic Entity", "Age_Group")
sql = sql.replace("table name", "db4caseage")
else:
sql = sql.replace("Demographic Entity", "Race_Ethnicity")
sql = sql.replace("table name", "db4caserace")
else:
if dem == 'sex' or dem == 'gender':
sql = sql.replace("Demographic Entity", "Sex")
sql = sql.replace("table name", "db4deathsex")
elif dem == 'age' or dem == 'age group':
sql = sql.replace("Demographic Entity", "Age_Group")
sql = sql.replace("table name", "db4deathage")
else:
sql = sql.replace("Demographic Entity", "Race_Ethnicity")
sql = sql.replace("table name", "db4deathrace")
if amo == 'percentage of':
sql = sql.replace("Amount Entity", "Count")
else:
sql = sql.replace("Amount Entity", "Count")
if val == 'highest' or val == 'most':
sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
else:
sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "Give me the (Demographic Entity) that has the (Value Entity) (Amount Entity) (Cases) in the United States.":
sql = "Select Demographic Entity, Amount Entity from table name Order by Amount Entity Value Entity"
if cas == 'confirmed cases' or cas == 'cases':
if dem == 'sex' or dem == 'gender':
sql = sql.replace("Demographic Entity", "Sex")
sql = sql.replace("table name", "db4casesex")
elif dem == 'age' or dem == 'age group':
sql = sql.replace("Demographic Entity", "Age_Group")
sql = sql.replace("table name", "db4caseage")
else:
sql = sql.replace("Demographic Entity", "Race_Ethnicity")
sql = sql.replace("table name", "db4caserace")
else:
if dem == 'sex' or dem == 'gender':
sql = sql.replace("Demographic Entity", "Sex")
sql = sql.replace("table name", "db4deathsex")
elif dem == 'age' or dem == 'age group':
sql = sql.replace("Demographic Entity", "Age_Group")
sql = sql.replace("table name", "db4deathage")
else:
sql = sql.replace("Demographic Entity", "Race_Ethnicity")
sql = sql.replace("table name", "db4deathrace")
if amo == 'percentage of':
sql = sql.replace("Amount Entity", "Count")
else:
sql = sql.replace("Amount Entity", "Count")
if val == 'highest' or val == 'most':
sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
else:
sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "Provide me with the (Demographic Entity) that has the (Value Entity) (Amount Entity) (Cases) in the United States.":
sql = "Select Demographic Entity, Amount Entity from table name Order by Amount Entity Value Entity"
if cas == 'confirmed cases' or cas == 'cases':
if dem == 'sex' or dem == 'gender':
sql = sql.replace("Demographic Entity", "Sex")
sql = sql.replace("table name", "db4casesex")
elif dem == 'age' or dem == 'age group':
sql = sql.replace("Demographic Entity", "Age_Group")
sql = sql.replace("table name", "db4caseage")
else:
sql = sql.replace("Demographic Entity", "Race_Ethnicity")
sql = sql.replace("table name", "db4caserace")
else:
if dem == 'sex' or dem == 'gender':
sql = sql.replace("Demographic Entity", "Sex")
sql = sql.replace("table name", "db4deathsex")
elif dem == 'age' or dem == 'age group':
sql = sql.replace("Demographic Entity", "Age_Group")
sql = sql.replace("table name", "db4deathage")
else:
sql = sql.replace("Demographic Entity", "Race_Ethnicity")
sql = sql.replace("table name", "db4deathrace")
if amo == 'percentage of':
sql = sql.replace("Amount Entity", "Count")
else:
sql = sql.replace("Amount Entity", "Count")
if val == 'highest' or val == 'most':
sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
else:
sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "List the (Demographic Entity) that has the (Value Entity) (Amount Entity) (Cases) in the United States.":
sql = "Select Demographic Entity, Amount Entity from table name Order by Amount Entity Value Entity"
if cas == 'confirmed cases' or cas == 'cases':
if dem == 'sex' or dem == 'gender':
sql = sql.replace("Demographic Entity", "Sex")
sql = sql.replace("table name", "db4casesex")
elif dem == 'age' or dem == 'age group':
sql = sql.replace("Demographic Entity", "Age_Group")
sql = sql.replace("table name", "db4caseage")
else:
sql = sql.replace("Demographic Entity", "Race_Ethnicity")
sql = sql.replace("table name", "db4caserace")
else:
if dem == 'sex' or dem == 'gender':
sql = sql.replace("Demographic Entity", "Sex")
sql = sql.replace("table name", "db4deathsex")
elif dem == 'age' or dem == 'age group':
sql = sql.replace("Demographic Entity", "Age_Group")
sql = sql.replace("table name", "db4deathage")
else:
sql = sql.replace("Demographic Entity", "Race_Ethnicity")
sql = sql.replace("table name", "db4deathrace")
if amo == 'percentage of':
sql = sql.replace("Amount Entity", "Count")
else:
sql = sql.replace("Amount Entity", "Count")
if val == 'highest' or val == 'most':
sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
else:
sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
bucket.update({'sql': sql})
if real_question == "What is the total forecasted number of deaths in (State Entity) in the next (x) days?":
sql = "Select Max(point) from db4forecaststate where target_week_end_date = 'Time Entity' and location_name = \"State Entity\""
sql = sql.replace("State Entity", state)
today = datetime.date.today()
num_day = int(num)
future_date = today + datetime.timedelta(days=num_day)
if future_date.weekday() == 5:
sql = sql.replace("Time Entity", str(future_date))
elif future_date.weekday() == 6:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=6)))
else:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=5 - future_date.weekday())))
bucket.update({'sql': sql})
if real_question == "Provide me with the total forecasted number of deaths in (State Entity) in the next (x) days.":
sql = "Select Max(point) from db4forecaststate where target_week_end_date = 'Time Entity' and location_name = \"State Entity\""
sql = sql.replace("State Entity", state)
today = datetime.date.today()
num_day = int(num)
future_date = today + datetime.timedelta(days=num_day)
if future_date.weekday() == 5:
sql = sql.replace("Time Entity", str(future_date))
elif future_date.weekday() == 6:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=6)))
else:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=5 - future_date.weekday())))
bucket.update({'sql': sql})
if real_question == "Give me the total forecasted number of deaths in (State Entity) in the next (x) days.":
sql = "Select Max(point) from db4forecaststate where target_week_end_date = 'Time Entity' and location_name = \"State Entity\""
sql = sql.replace("State Entity", state)
today = datetime.date.today()
num_day = int(num)
future_date = today + datetime.timedelta(days=num_day)
if future_date.weekday() == 5:
sql = sql.replace("Time Entity", str(future_date))
elif future_date.weekday() == 6:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=6)))
else:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=5 - future_date.weekday())))
bucket.update({'sql': sql})
if real_question == "List the total forecasted number of deaths in (State Entity) in the next (x) days.":
sql = "Select Max(point) from db4forecaststate where target_week_end_date = 'Time Entity' and location_name = \"State Entity\""
sql = sql.replace("State Entity", state)
today = datetime.date.today()
num_day = int(num)
future_date = today + datetime.timedelta(days=num_day)
if future_date.weekday() == 5:
sql = sql.replace("Time Entity", str(future_date))
elif future_date.weekday() == 6:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=6)))
else:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=5 - future_date.weekday())))
bucket.update({'sql': sql})
if real_question == "Which state will have the (Value Entity) total forecasted number of deaths in the next (x) days?":
sql = "Select location_name, Max(point) from db4forecaststate WHERE target_week_end_date = 'Time Entity' and location_name != 'National' group by location_name order by Max(point) asc/desc limit 0,1"
if val == 'highest' or val == 'most':
sql = sql.replace("asc/desc", "desc")
else:
sql = sql.replace("asc/desc", "asc")
today = datetime.date.today()
num_day = int(num)
future_date = today + datetime.timedelta(days=num_day)
if future_date.weekday() == 5:
sql = sql.replace("Time Entity", str(future_date))
elif future_date.weekday() == 6:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=6)))
else:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=5 - future_date.weekday())))
bucket.update({'sql': sql})
if real_question == "Provide me with the state that have the (Value Entity) total forecasted number of deaths in the next (x) days.":
sql = "Select location_name, Max(point) from db4forecaststate WHERE target_week_end_date = 'Time Entity' and location_name != 'National' group by location_name order by Max(point) asc/desc limit 0,1"
if val == 'highest' or val == 'most':
sql = sql.replace("asc/desc", "desc")
else:
sql = sql.replace("asc/desc", "asc")
today = datetime.date.today()
num_day = int(num)
future_date = today + datetime.timedelta(days=num_day)
if future_date.weekday() == 5:
sql = sql.replace("Time Entity", str(future_date))
elif future_date.weekday() == 6:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=6)))
else:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=5 - future_date.weekday())))
bucket.update({'sql': sql})
if real_question == "Give me the state that have the (Value Entity) total forecasted number of deaths in the next (x) days.":
sql = "Select location_name, Max(point) from db4forecaststate WHERE target_week_end_date = 'Time Entity' and location_name != 'National' group by location_name order by Max(point) asc/desc limit 0,1"
if val == 'highest' or val == 'most':
sql = sql.replace("asc/desc", "desc")
else:
sql = sql.replace("asc/desc", "asc")
today = datetime.date.today()
num_day = int(num)
future_date = today + datetime.timedelta(days=num_day)
if future_date.weekday() == 5:
sql = sql.replace("Time Entity", str(future_date))
elif future_date.weekday() == 6:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=6)))
else:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=5 - future_date.weekday())))
bucket.update({'sql': sql})
if real_question == "List the state that have the (Value Entity) total forecasted number of deaths in the next (x) days.":
sql = "Select location_name, Max(point) from db4forecaststate WHERE target_week_end_date = 'Time Entity' and location_name != 'National' group by location_name order by Max(point) asc/desc limit 0,1"
if val == 'highest' or val == 'most':
sql = sql.replace("asc/desc", "desc")
else:
sql = sql.replace("asc/desc", "asc")
today = datetime.date.today()
num_day = int(num)
future_date = today + datetime.timedelta(days=num_day)
if future_date.weekday() == 5:
sql = sql.replace("Time Entity", str(future_date))
elif future_date.weekday() == 6:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=6)))
else:
sql = sql.replace("Time Entity", str(future_date + datetime.timedelta(days=5 - future_date.weekday())))
bucket.update({'sql': sql})
if real_question == "What is the percentage change in (Mobility Entity) in (State Entity) in (Day), (Month)?":
sql = "Select (Mobility) FROM db4mobility WHERE date = \"given date\" AND country_region = \"United States\" AND sub_region_1 = \"State name\" and iso_3166_2_code LIKE \"US-%\""
given_date = dateconvert6(day, mon)
sql = sql.replace("given date", given_date)
sql = sql.replace("State name", state)
if mob == 'retail and recreation':
sql = sql.replace("(Mobility)", "retail_and_recreation_percent_change_from_baseline")
elif mob == 'grocery and pharmacy':
sql = sql.replace("(Mobility)", "grocery_and_pharmacy_percent_change_from_baseline")
elif mob == 'parks':
sql = sql.replace("(Mobility)", "parks_percent_change_from_baseline")
elif mob == 'transit stations':
sql = sql.replace("(Mobility)", "transit_stations_percent_change_from_baseline")
elif mob == 'workplaces':
sql = sql.replace("(Mobility)", "workplaces_percent_change_from_baseline")
else:
sql = sql.replace("(Mobility)", "residential_percent_change_from_baseline")
bucket.update({'sql': sql})
if real_question == "Give me the percentage change in (Mobility Entity) in (State Entity) in (Day), (Month).":
sql = "Select (Mobility) FROM db4mobility WHERE date = \"given date\" AND country_region = \"United States\" AND sub_region_1 = \"State name\" and iso_3166_2_code LIKE \"US-%\""
given_date = dateconvert6(day, mon)
sql = sql.replace("given date", given_date)
sql = sql.replace("State name", state)
if mob == 'retail and recreation':
sql = sql.replace("(Mobility)", "retail_and_recreation_percent_change_from_baseline")
elif mob == 'grocery and pharmacy':
sql = sql.replace("(Mobility)", "grocery_and_pharmacy_percent_change_from_baseline")
elif mob == 'parks':
sql = sql.replace("(Mobility)", "parks_percent_change_from_baseline")
elif mob == 'transit stations':
sql = sql.replace("(Mobility)", "transit_stations_percent_change_from_baseline")
elif mob == 'workplaces':
sql = sql.replace("(Mobility)", "workplaces_percent_change_from_baseline")
else:
sql = sql.replace("(Mobility)", "residential_percent_change_from_baseline")
bucket.update({'sql': sql})
if real_question == "Provide me with the percentage change in (Mobility Entity) in (County Entity) (State Entity) in (Month).":
sql = "Select (Select (Mobility) from db4mobility where date = 'end date' and country_region = 'United States' and sub_region_1 = \"State name\" and iso_3166_2_code LIKE \"US-%\") - (Select (Mobility) from db4mobility where date = 'start date' and country_region = 'United States' and sub_region_1 = \"State name\" and iso_3166_2_code LIKE \"US-%\")"
sql = sql.replace("State name", state_name + "\" AND sub_region_2 = \"" + county_name + " County")
start_date, end_date = monthconvert4(mon)
sql = sql.replace("end date", end_date)
sql = sql.replace("start date", start_date)
if mob == 'retail and recreation':
sql = sql.replace("(Mobility)", "retail_and_recreation_percent_change_from_baseline")
elif mob == 'grocery and pharmacy':
sql = sql.replace("(Mobility)", "grocery_and_pharmacy_percent_change_from_baseline")
elif mob == 'parks':
sql = sql.replace("(Mobility)", "parks_percent_change_from_baseline")
elif mob == 'transit stations':
sql = sql.replace("(Mobility)", "transit_stations_percent_change_from_baseline")
elif mob == 'workplaces':
sql = sql.replace("(Mobility)", "workplaces_percent_change_from_baseline")
else:
sql = sql.replace("(Mobility)", "residential_percent_change_from_baseline")
bucket.update({'sql': sql})
if real_question == "List the percentage change in (Mobility Entity) in (County Entity) (State Entity) in (Month).":
sql = "Select (Select (Mobility) from db4mobility where date = 'end date' and country_region = 'United States' and sub_region_1 = \"State name\" and iso_3166_2_code LIKE \"US-%\") - (Select (Mobility) from db4mobility where date = 'start date' and country_region = 'United States' and sub_region_1 = \"State name\" and iso_3166_2_code LIKE \"US-%\")"
sql = sql.replace("State name", state_name + "\" AND sub_region_2 = \"" + county_name + " County")
start_date, end_date = monthconvert4(mon)
sql = sql.replace("end date", end_date)
sql = sql.replace("start date", start_date)
if mob == 'retail and recreation':
sql = sql.replace("(Mobility)", "retail_and_recreation_percent_change_from_baseline")
elif mob == 'grocery and pharmacy':
sql = sql.replace("(Mobility)", "grocery_and_pharmacy_percent_change_from_baseline")
elif mob == 'parks':
sql = sql.replace("(Mobility)", "parks_percent_change_from_baseline")
elif mob == 'transit stations':
sql = sql.replace("(Mobility)", "transit_stations_percent_change_from_baseline")
elif mob == 'workplaces':
sql = sql.replace("(Mobility)", "workplaces_percent_change_from_baseline")
else:
sql = sql.replace("(Mobility)", "residential_percent_change_from_baseline")
bucket.update({'sql': sql})
if real_question == "Which state had the (Value Entity) percentage change in (Mobility Entity) today?":
sql = "Select sub_region_1, (Mobility) from db4mobility where date = 'Time Entity' and country_region = 'United States' and iso_3166_2_code like \"US-%\" and (Mobility) is not null order by (Mobility) asc/desc limit 0,1"
if mob == 'retail and recreation':
sql = sql.replace("(Mobility)", "retail_and_recreation_percent_change_from_baseline")
elif mob == 'grocery and pharmacy':
sql = sql.replace("(Mobility)", "grocery_and_pharmacy_percent_change_from_baseline")
elif mob == 'parks':
sql = sql.replace("(Mobility)", "parks_percent_change_from_baseline")
elif mob == 'transit stations':
sql = sql.replace("(Mobility)", "transit_stations_percent_change_from_baseline")
elif mob == 'workplaces':
sql = sql.replace("(Mobility)", "workplaces_percent_change_from_baseline")
else:
sql = sql.replace("(Mobility)", "residential_percent_change_from_baseline")
if val == 'highest' or val == 'most':
sql = sql.replace("asc/desc", "desc")
else:
sql = sql.replace("asc/desc", "asc")
today = datetime.date.today()
sql = sql.replace("Time Entity", str(today))
bucket.update({'sql': sql})
if real_question == "Give me the state that had the (Value Entity) percentage change in (Mobility Entity) today.":
sql = "Select sub_region_1, (Mobility) from db4mobility where date = 'Time Entity' and country_region = 'United States' and iso_3166_2_code like \"US-%\" and (Mobility) is not null order by (Mobility) asc/desc limit 0,1"
if mob == 'retail and recreation':
sql = sql.replace("(Mobility)", "retail_and_recreation_percent_change_from_baseline")
elif mob == 'grocery and pharmacy':
sql = sql.replace("(Mobility)", "grocery_and_pharmacy_percent_change_from_baseline")
elif mob == 'parks':
sql = sql.replace("(Mobility)", "parks_percent_change_from_baseline")
elif mob == 'transit stations':
sql = sql.replace("(Mobility)", "transit_stations_percent_change_from_baseline")
elif mob == 'workplaces':
sql = sql.replace("(Mobility)", "workplaces_percent_change_from_baseline")
else:
sql = sql.replace("(Mobility)", "residential_percent_change_from_baseline")
if val == 'highest' or val == 'most':
sql = sql.replace("asc/desc", "desc")
else:
sql = sql.replace("asc/desc", "asc")
today = datetime.date.today()
sql = sql.replace("Time Entity", str(today))
bucket.update({'sql': sql})
if real_question == "Give me the county that had the (Value Entity) percentage change in (Mobility Entity) in (Month).":
sql = """Select t1.sub_region_2, t1.(Mobility)-t2.(Mobility) from(Select sub_region_2, (Mobility) from db4mobility where date = 'end date' and country_region = 'United States' and sub_region_2 is not null and (Mobility) is not null) as t1 Inner Join (Select sub_region_2, (Mobility) from db4mobility where date = 'start date' and country_region = 'United States' and (Mobility) is not null) as t2 on t1 sub_region_2=t2.sub_region_2 order by t1.(Mobility)-t2.(Mobility) asc/desc limit 0,1"""
if mob == 'retail and recreation':
sql = sql.replace("(Mobility)", "retail_and_recreation_percent_change_from_baseline")
elif mob == 'grocery and pharmacy':
sql = sql.replace("(Mobility)", "grocery_and_pharmacy_percent_change_from_baseline")
elif mob == 'parks':
sql = sql.replace("(Mobility)", "parks_percent_change_from_baseline")
elif mob == 'transit stations':
sql = sql.replace("(Mobility)", "transit_stations_percent_change_from_baseline")
elif mob == 'workplaces':
sql = sql.replace("(Mobility)", "workplaces_percent_change_from_baseline")
else:
sql = sql.replace("(Mobility)", "residential_percent_change_from_baseline")
if val == 'highest' or val == 'most':
sql = sql.replace("asc/desc", "desc")
else:
sql = sql.replace("asc/desc", "asc")
start_date, end_date = monthconvert4(mon)
sql = sql.replace("end date", end_date)
sql = sql.replace("start date", start_date)
bucket.update({'sql': sql})
if real_question == "List the county that had the (Value Entity) percentage change in (Mobility Entity) in (Month).":
sql = """Select t1.sub_region_2, t1.(Mobility)-t2.(Mobility) from(Select sub_region_2, (Mobility) from db4mobility where date = 'end date' and country_region = 'United States' and sub_region_2 is not null and (Mobility) is not null) as t1 Inner Join (Select sub_region_2, (Mobility) from db4mobility where date = 'start date' and country_region = 'United States' and (Mobility) is not null) as t2 on t1 sub_region_2=t2.sub_region_2 order by t1.(Mobility)-t2.(Mobility) asc/desc limit 0,1"""
if mob == 'retail and recreation':
sql = sql.replace("(Mobility)", "retail_and_recreation_percent_change_from_baseline")
elif mob == 'grocery and pharmacy':
sql = sql.replace("(Mobility)", "grocery_and_pharmacy_percent_change_from_baseline")
elif mob == 'parks':
sql = sql.replace("(Mobility)", "parks_percent_change_from_baseline")
elif mob == 'transit stations':
sql = sql.replace("(Mobility)", "transit_stations_percent_change_from_baseline")
elif mob == 'workplaces':
sql = sql.replace("(Mobility)", "workplaces_percent_change_from_baseline")
else:
sql = sql.replace("(Mobility)", "residential_percent_change_from_baseline")
if val == 'highest' or val == 'most':
sql = sql.replace("asc/desc", "desc")
else:
sql = sql.replace("asc/desc", "asc")
start_date, end_date = monthconvert4(mon)
sql = sql.replace("end date", end_date)
sql = sql.replace("start date", start_date)
bucket.update({'sql': sql})
if real_question == "Which county in (State Entity) has the (Value Entity) percentage of (Race Entity) deaths?":
sql = "Select County_Name from db5 where Indicator = 'Distribution of COVID-19 deaths (%)' and State = 'State Entity' order by Race Entity Column Value Entity"
if val == 'highest' or val == 'most':
sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
else:
sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
state_abbreviation = state_key[state_val.index(state)]
sql = sql.replace("State Entity", state_abbreviation)
specific_race = rac
if rac == "people of color":
race_input = "Non_Hispanic_Black"
else:
specific_race = rac
while specific_race.find("multiracial") >= 0 or specific_race.find("mixed") >= 0 or specific_race.find(
"Pacific Islander") >= 0 or specific_race.find("Native Hawaiian") >= 0 or specific_race.find(
"Pacific Islander and Native Hawaiian") >= 0 or (
(specific_race.find("Hispanic") >= 0 or specific_race.find("Latino") >= 0) and rac.find(
"Non-Hispanic") >= 0):
specific_race = random.choice(data3['Race'])
if specific_race.find("African-American") >= 0 or specific_race.find("Black") >= 0:
race_input = "Non_Hispanic_Black"
elif specific_race.find("Hispanic") >= 0 or specific_race.find("Latino") >= 0:
race_input = "Hispanic"
elif specific_race.find("Asian") >= 0:
race_input = "Non_Hispanic_Asian"
elif specific_race.find("Alaska Native") >= 0 or specific_race.find(
"American Indian or Alaska Native") >= 0 or specific_race.find("American Indian") >= 0:
race_input = "Non_Hispanic_AIAN"
else:
race_input = "Non_Hispanic_White"
if rac.find("people of color") >= 0:
real_sub = rac
elif rac.find("people") < 0:
if specific_race == 'American Indian or Alaska Native':
real_sub = rac.replace("(race)", 'American Indians or Alaska Natives')
else:
real_sub = rac.replace("(race)", specific_race + 's')
else:
real_sub = rac.replace("(race)", specific_race)
sql = sql.replace("Race Entity Column", race_input)
bucket.update({'sql': sql})
if real_question == "Give me the county in (State Entity) has the (Value Entity) percentage of (Race Entity) deaths.":
sql = "Select County_Name from db5 where Indicator = 'Distribution of COVID-19 deaths (%)' and State = 'State Entity' order by Race Entity Column Value Entity"
if val == 'highest' or val == 'most':
sql = sql.replace('Value Entity', 'desc limit 0' + ', 1')
else:
sql = sql.replace('Value Entity', 'asc limit 0' + ', 1')
state_abbreviation = state_key[state_val.index(state)]
sql = sql.replace("State Entity", state_abbreviation)
specific_race = rac
if rac == "people of color":
race_input = "Non_Hispanic_Black"
else:
specific_race = rac
while specific_race.find("multiracial") >= 0 or specific_race.find("mixed") >= 0 or specific_race.find(
"Pacific Islander") >= 0 or specific_race.find("Native Hawaiian") >= 0 or specific_race.find(
"Pacific Islander and Native Hawaiian") >= 0 or (
(specific_race.find("Hispanic") >= 0 or specific_race.find("Latino") >= 0) and rac.find(
"Non-Hispanic") >= 0):
specific_race = random.choice(data3['Race'])
if specific_race.find("African-American") >= 0 or specific_race.find("Black") >= 0:
race_input = "Non_Hispanic_Black"
elif specific_race.find("Hispanic") >= 0 or specific_race.find("Latino") >= 0:
race_input = "Hispanic"
elif specific_race.find("Asian") >= 0:
race_input = "Non_Hispanic_Asian"
elif specific_race.find("Alaska Native") >= 0 or specific_race.find(
"American Indian or Alaska Native") >= 0 or specific_race.find("American Indian") >= 0:
race_input = "Non_Hispanic_AIAN"
else:
race_input = "Non_Hispanic_White"
if rac.find("people of color") >= 0:
real_sub = rac
elif rac.find("people") < 0:
if specific_race == 'American Indian or Alaska Native':
real_sub = rac.replace("(race)", 'American Indians or Alaska Natives')
else:
real_sub = rac.replace("(race)", specific_race + 's')
else:
real_sub = rac.replace("(race)", specific_race)
sql = sql.replace("Race Entity Column", race_input)
bucket.update({'sql': sql})
# "Provide me with" / "List" phrasings share one handler: find the county in a
# state with the highest/lowest death share for a race group. Merging the two
# previously duplicated, byte-identical bodies into one `in (...)` test.
if real_question in (
        "Provide me with the county in (State Entity) that has the (Value Entity) percentage of (Race Entity) deaths.",
        "List the county in (State Entity) that has the (Value Entity) percentage of (Race Entity) deaths."):
    sql = "Select County_Name from db5 where Indicator = 'Distribution of COVID-19 deaths (%)' and State = 'State Entity' order by Race Entity Column Value Entity"
    # "Value Entity" selects sort direction; LIMIT 0, 1 keeps only the top row.
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0, 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0, 1')
    # db5 stores two-letter state codes, so map the full state name first.
    state_abbreviation = state_key[state_val.index(state)]
    sql = sql.replace("State Entity", state_abbreviation)
    specific_race = rac
    if rac == "people of color":
        race_input = "Non_Hispanic_Black"
    else:
        # Re-draw until we land on a race db5 has a column for: multiracial /
        # NHPI groups have no column, and a Hispanic draw conflicts with an
        # explicitly "Non-Hispanic" question.
        while (specific_race.find("multiracial") >= 0
               or specific_race.find("mixed") >= 0
               or specific_race.find("Pacific Islander") >= 0
               or specific_race.find("Native Hawaiian") >= 0
               or ((specific_race.find("Hispanic") >= 0 or specific_race.find("Latino") >= 0)
                   and rac.find("Non-Hispanic") >= 0)):
            specific_race = random.choice(data3['Race'])
        # Map the race phrase onto its db5 column name.
        if specific_race.find("African-American") >= 0 or specific_race.find("Black") >= 0:
            race_input = "Non_Hispanic_Black"
        elif specific_race.find("Hispanic") >= 0 or specific_race.find("Latino") >= 0:
            race_input = "Hispanic"
        elif specific_race.find("Asian") >= 0:
            race_input = "Non_Hispanic_Asian"
        elif specific_race.find("Alaska Native") >= 0 or specific_race.find("American Indian") >= 0:
            race_input = "Non_Hispanic_AIAN"
        else:
            race_input = "Non_Hispanic_White"
    # Build the human-readable "(race)" substitution for the question text.
    if rac.find("people of color") >= 0:
        real_sub = rac
    elif rac.find("people") < 0:
        if specific_race == 'American Indian or Alaska Native':
            real_sub = rac.replace("(race)", 'American Indians or Alaska Natives')
        else:
            real_sub = rac.replace("(race)", specific_race + 's')
    else:
        real_sub = rac.replace("(race)", specific_race)
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
# Four phrasings (What / Give me / Provide me with / List) previously carried
# four byte-identical handler bodies; one merged handler replaces them.
if real_question in (
        "What percentage of Covid-19 deaths in (County Entity), (State Entity) are from (Race Entity)?",
        "Give me the percentage of Covid-19 deaths in (County Entity), (State Entity) that are from (Race Entity).",
        "Provide me with the percentage of Covid-19 deaths in (County Entity), (State Entity) that are from (Race Entity).",
        "List the percentage of Covid-19 deaths in (County Entity), (State Entity) that are from (Race Entity)."):
    sql = "Select Race Entity Column from db5 where Indicator = 'Distribution of COVID-19 deaths (%)' and County_Name = \"County Entity\" and State = \"State Entity\""
    # db5 uses two-letter state codes and "<name> County" county names.
    state_abbreviation = state_key[state_val.index(state_name)]
    sql = sql.replace("State Entity", state_abbreviation)
    sql = sql.replace("County Entity", county_name + " County")
    specific_race = rac
    if rac == "people of color":
        race_input = "Non_Hispanic_Black"
    else:
        # Re-draw until the race maps onto a db5 column (see note above the
        # analogous loop in the county handler).
        while (specific_race.find("multiracial") >= 0
               or specific_race.find("mixed") >= 0
               or specific_race.find("Pacific Islander") >= 0
               or specific_race.find("Native Hawaiian") >= 0
               or ((specific_race.find("Hispanic") >= 0 or specific_race.find("Latino") >= 0)
                   and rac.find("Non-Hispanic") >= 0)):
            specific_race = random.choice(data3['Race'])
        if specific_race.find("African-American") >= 0 or specific_race.find("Black") >= 0:
            race_input = "Non_Hispanic_Black"
        elif specific_race.find("Hispanic") >= 0 or specific_race.find("Latino") >= 0:
            race_input = "Hispanic"
        elif specific_race.find("Asian") >= 0:
            race_input = "Non_Hispanic_Asian"
        elif specific_race.find("Alaska Native") >= 0 or specific_race.find("American Indian") >= 0:
            race_input = "Non_Hispanic_AIAN"
        else:
            race_input = "Non_Hispanic_White"
    # Human-readable "(race)" substitution for the surfaced question text.
    if rac.find("people of color") >= 0:
        real_sub = rac
    elif rac.find("people") < 0:
        if specific_race == 'American Indian or Alaska Native':
            real_sub = rac.replace("(race)", 'American Indians or Alaska Natives')
        else:
            real_sub = rac.replace("(race)", specific_race + 's')
    else:
        real_sub = rac.replace("(race)", specific_race)
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
# Four phrasings with byte-identical bodies merged into one handler:
# estimate race-specific death counts as share-of-deaths times total deaths.
if real_question in (
        "How many (Race Entity) deaths occured in (County Entity), (State Entity)?",
        "Give me the number of (Race Entity) deaths occured in (County Entity), (State Entity).",
        "Provide me with the number of (Race Entity) deaths occured in (County Entity), (State Entity).",
        "List the number of (Race Entity) deaths occured in (County Entity), (State Entity)."):
    # NOTE(review): the Indicator suggests the race column is a percentage; if
    # so this product is off by a factor of 100 — confirm against db5's data.
    sql = "Select Round((Select Race Entity Column from db5 where Indicator = 'Distribution of COVID-19 deaths (%)' and State = \"State Entity\" and County_Name = \"County Entity\") * (Select Deaths from db5 where Indicator = 'Distribution of COVID-19 deaths (%)' and State = \"State Entity\" and County_Name = \"County Entity\"))"
    state_abbreviation = state_key[state_val.index(state_name)]
    sql = sql.replace("State Entity", state_abbreviation)
    sql = sql.replace("County Entity", county_name + " County")
    specific_race = rac
    if rac == "people of color":
        race_input = "Non_Hispanic_Black"
    else:
        # Re-draw until the race maps onto a db5 column.
        while (specific_race.find("multiracial") >= 0
               or specific_race.find("mixed") >= 0
               or specific_race.find("Pacific Islander") >= 0
               or specific_race.find("Native Hawaiian") >= 0
               or ((specific_race.find("Hispanic") >= 0 or specific_race.find("Latino") >= 0)
                   and rac.find("Non-Hispanic") >= 0)):
            specific_race = random.choice(data3['Race'])
        if specific_race.find("African-American") >= 0 or specific_race.find("Black") >= 0:
            race_input = "Non_Hispanic_Black"
        elif specific_race.find("Hispanic") >= 0 or specific_race.find("Latino") >= 0:
            race_input = "Hispanic"
        elif specific_race.find("Asian") >= 0:
            race_input = "Non_Hispanic_Asian"
        elif specific_race.find("Alaska Native") >= 0 or specific_race.find("American Indian") >= 0:
            race_input = "Non_Hispanic_AIAN"
        else:
            race_input = "Non_Hispanic_White"
    # Human-readable "(race)" substitution for the surfaced question text.
    if rac.find("people of color") >= 0:
        real_sub = rac
    elif rac.find("people") < 0:
        if specific_race == 'American Indian or Alaska Native':
            real_sub = rac.replace("(race)", 'American Indians or Alaska Natives')
        else:
            real_sub = rac.replace("(race)", specific_race + 's')
    else:
        real_sub = rac.replace("(race)", specific_race)
    sql = sql.replace("Race Entity Column", race_input)
    bucket.update({'sql': sql})
# Four phrasings with byte-identical bodies merged into one handler: fetch the
# full per-race death distribution for a county.
if real_question in (
        "What is the racial breakdown of Covid-19 deaths in (County Entity), (State Entity)?",
        "Give me the racial breakdown of Covid-19 deaths in (County Entity), (State Entity).",
        "Provide me with the racial breakdown of Covid-19 deaths in (County Entity), (State Entity).",
        "List the racial breakdown of Covid-19 deaths in (County Entity), (State Entity)."):
    sql = """Select Deaths, Non_Hispanic_White, Non_Hispanic_Black, Non_Hispanic_AIAN, Non_Hispanic_Asian, Other, Hispanic from db5 where Indicator = \"Distribution of COVID-19 deaths (%)\" and State = \"State Entity\" and County_Name = \"County Entity\""""
    # db5 uses two-letter state codes and "<name> County" county names.
    state_abbreviation = state_key[state_val.index(state_name)]
    sql = sql.replace("State Entity", state_abbreviation)
    sql = sql.replace("County Entity", county_name + " County")
    bucket.update({'sql': sql})
# Two phrasings with byte-identical bodies merged: tests done by a country on
# a specific date.
if real_question in (
        "What are the number of (Testing Entity) done by (Country) in (Day), (Month)?",
        "Give me the number of (Testing Entity) done by (Country) in (Day), (Month)."):
    sql = "Select Testing Entity Column from db6 where Entity = \"Country Name\" and Date = 'Time Entity'"
    given_date = dateconvert6(day, mon)
    sql = sql.replace("Time Entity", given_date)
    sql = sql.replace("Country Name", cou)
    if tes == 'daily tests':
        # Daily counts live in db6file4 under the New_Test column.
        sql = sql.replace("Testing Entity Column", "New_Test")
        sql = sql.replace("db6", "db6file4")
    else:
        # Cumulative counts stay in db6 under Total.
        sql = sql.replace("Testing Entity Column", "Total")
    bucket.update({'sql': sql})
# Two phrasings with byte-identical bodies merged: tests done by a country
# over a whole month (cumulative at month end minus month start).
if real_question in (
        "Provide me with the number of (Testing Entity) done by (Country) in (Month).",
        "List the number of (Testing Entity) done by (Country) in (Month)."):
    # BUGFIX: the first placeholder used to read "\" Country Entity\"" with a
    # stray leading space that replace("Country Entity", cou) left behind, so
    # the Entity match could never succeed; both placeholders now match.
    sql = "Select (Select Testing Entity Column from db6 where date = 'Time End' and Entity = \"Country Entity\") - (Select Testing Entity Column from db6 where date = 'Time Start' and Entity = \"Country Entity\")"
    time_start, time_end = monthconvert4(mon)
    sql = sql.replace("Time End", time_end)
    sql = sql.replace("Time Start", time_start)
    sql = sql.replace("Country Entity", cou)
    if tes == 'daily tests':
        sql = sql.replace("Testing Entity Column", "New_Test")
        sql = sql.replace("db6", "db6file4")
    else:
        sql = sql.replace("Testing Entity Column", "Total")
    bucket.update({'sql': sql})
# Four phrasings with byte-identical bodies merged: a country's testing rate
# on a specific date.
if real_question in (
        "What is the (Rate Entity) in (Country) in (Day), (Month)?",
        "Give me the (Rate Entity) in (Country) in (Day), (Month).",
        "Provide me with the (Rate Entity) in (Country) in (Day), (Month).",
        "List the (Rate Entity) in (Country) in (Day), (Month)."):
    sql = "Select Rate Entity Column from db6file2 where date = 'Time Entity' and Entity = \"Country Entity\""
    given_date = dateconvert6(day, mon)
    sql = sql.replace("Time Entity", given_date)
    sql = sql.replace("Country Entity", cou)
    if rat == 'percent positive rate':
        sql = sql.replace("Rate Entity Column", "pos_rate")
    elif rat == 'percent negative rate':
        # Negative rate is derived as 100 minus the positive rate.
        sql = sql.replace("Rate Entity Column", "100-pos_rate")
    else:
        # Daily rates live in db6file3 rather than db6file2.
        sql = sql.replace("Rate Entity Column", "daily_rate")
        sql = sql.replace("db6file2", "db6file3")
    bucket.update({'sql': sql})
# Two phrasings with byte-identical bodies merged: country with the most/least
# tests on a specific date.
if real_question in (
        "Which country has the (Value Entity) (Testing Entity) in (Day), (Month)?",
        "Give me the country that has the (Value Entity) (Testing Entity) in (Day), (Month)."):
    sql = "Select Entity from db6 where date = 'Time Entity' order by Testing Entity Column Value Entity"
    given_date = dateconvert6(day, mon)
    sql = sql.replace("Time Entity", given_date)
    # "Value Entity" selects sort direction; LIMIT 0, 1 keeps the top row.
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0, 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0, 1')
    if tes == 'daily tests':
        sql = sql.replace("Testing Entity Column", "New_Test")
        sql = sql.replace("db6", "db6file4")
    else:
        sql = sql.replace("Testing Entity Column", "Total")
    bucket.update({'sql': sql})
# Two phrasings with byte-identical bodies merged: country with the most/least
# tests over a month, ranked by (month-end total - month-start total) via a
# self-join of db6 at the two boundary dates.
if real_question in (
        "Provide me with the country that has the (Value Entity) (Testing Entity) in (Month).",
        "List the country that has the (Value Entity) (Testing Entity) in (Month)."):
    sql = "Select e1 from (Select Entity as E1, Testing Entity Column as tot1 from db6 where date = 'Time End') as t1 Inner Join (Select Entity as e2, Testing Entity Column as tot2 from db6 where date = 'Time Start') as t2 on t1.e1=t2.e2 order by t1.tot1-t2.tot2 Value Entity"
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0, 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0, 1')
    if tes == 'daily tests':
        sql = sql.replace("Testing Entity Column", "New_Test")
        sql = sql.replace("db6", "db6file4")
    else:
        sql = sql.replace("Testing Entity Column", "Total")
    time_start, time_end = monthconvert4(mon)
    sql = sql.replace("Time End", time_end)
    sql = sql.replace("Time Start", time_start)
    bucket.update({'sql': sql})
# Four phrasings with byte-identical bodies merged: country with the
# highest/lowest testing rate on a specific date.
if real_question in (
        "Which country has the (Value Entity) (Rate Entity) in (Day), (Month)?",
        "Give me the country that has the (Value Entity) (Rate Entity) in (Day), (Month).",
        "Provide me with the country that has the (Value Entity) (Rate Entity) in (Day), (Month).",
        "List the country that has the (Value Entity) (Rate Entity) in (Day), (Month)."):
    sql = "Select Entity from db6file2 where date = 'Time Entity' order by Rate Entity Column Value Entity"
    if rat == 'percent positive rate':
        sql = sql.replace("Rate Entity Column", "pos_rate")
    elif rat == 'percent negative rate':
        sql = sql.replace("Rate Entity Column", "100-pos_rate")
    else:
        # Daily rates live in db6file3 rather than db6file2.
        sql = sql.replace("Rate Entity Column", "daily_rate")
        sql = sql.replace("db6file2", "db6file3")
    given_date = dateconvert6(day, mon)
    sql = sql.replace("Time Entity", given_date)
    # "Value Entity" selects sort direction; LIMIT 0, 1 keeps the top row.
    if val == 'highest' or val == 'most':
        sql = sql.replace('Value Entity', 'desc limit 0, 1')
    else:
        sql = sql.replace('Value Entity', 'asc limit 0, 1')
    bucket.update({'sql': sql})
| 49.39683
| 500
| 0.601281
| 19,123
| 158,959
| 4.895048
| 0.019662
| 0.058585
| 0.110824
| 0.024699
| 0.955378
| 0.952216
| 0.948754
| 0.947707
| 0.947558
| 0.945411
| 0
| 0.017643
| 0.270799
| 158,959
| 3,217
| 501
| 49.412185
| 0.789929
| 0
| 0
| 0.886672
| 0
| 0.040663
| 0.412507
| 0.038281
| 0.000377
| 0
| 0
| 0
| 0
| 1
| 0.002636
| false
| 0.006401
| 0.002636
| 0
| 0.007907
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c1759d7f2c72c1cd37f1f5a58b03d9ab18ba865
| 12,370
|
py
|
Python
|
tests/test_basic_laplace_kernels.py
|
dbstein/pybie2d
|
1c2d6c05f6dbb4f1ab4476d3824f4dde20f90d58
|
[
"Apache-2.0"
] | 11
|
2018-10-26T17:34:29.000Z
|
2020-04-27T21:21:33.000Z
|
tests/test_basic_laplace_kernels.py
|
dbstein/pybie2d
|
1c2d6c05f6dbb4f1ab4476d3824f4dde20f90d58
|
[
"Apache-2.0"
] | null | null | null |
tests/test_basic_laplace_kernels.py
|
dbstein/pybie2d
|
1c2d6c05f6dbb4f1ab4476d3824f4dde20f90d58
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import pybie2d
from pybie2d.kernels.laplace import Laplace_Kernel_Apply, Laplace_Kernel_Form
"""
Test all of the Laplace Kernel Functions against each other
"""
def get_random(sh, dtype):
    """Return a uniform-random array of shape *sh*, cast to *dtype*.

    For complex *dtype* the real and imaginary parts are drawn
    independently from U[0, 1).
    """
    out = np.random.rand(*sh).astype(dtype)
    if dtype is complex:
        out = out + 1j*np.random.rand(*sh)
    return out
# problem sizes: ns sources, nt targets
ns, nt = 100, 200
################################################################################
# Float ########################################################################
################################################################################
dtype = float
# random geometry (always real) and real-valued densities for the float tests
source = get_random((2, ns), float)
target = get_random((2, nt), float)
dipvec = get_random((2, ns), float)
charge = get_random((ns,), dtype)
dipstr = get_random((ns,), dtype)
############################################################################
# Without gradients
# NOTE(review): test1-test4 read pot1/pot2/pot3 as module globals at call
# time; later sections rebind these same names, so under pytest each test
# actually sees the values from the LAST assignment in the module, not the
# ones computed just above it. Consider snapshotting per section — TODO confirm
# this is intended.
# Laplace Kernel, charge only
# force usage of numba
pot1 = Laplace_Kernel_Apply(source, target, charge=charge, backend='numba', dtype=dtype)
# force usage of FMM
pot2 = Laplace_Kernel_Apply(source, target, charge=charge, backend='FMM', dtype=dtype)
# form the matrix
MAT = Laplace_Kernel_Form(source, target, ifcharge=True)
pot3 = MAT.dot(charge)
def test1():
    # numba, FMM, and dense-matrix evaluations must agree to 1e-8 relative
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
# Laplace Kernel, dipole only
# force usage of numba
pot1 = Laplace_Kernel_Apply(source, target, dipstr=dipstr, dipvec=dipvec, backend='numba', dtype=dtype)
# force usage of FMM
pot2 = Laplace_Kernel_Apply(source, target, dipstr=dipstr, dipvec=dipvec, backend='FMM', dtype=dtype)
# form the matrix
MAT = Laplace_Kernel_Form(source, target, ifdipole=True, dipvec=dipvec)
pot3 = MAT.dot(dipstr)
def test2():
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
# Laplace Kernel, charge + dipstr
# force usage of numba
pot1 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=dipstr, dipvec=dipvec, backend='numba', dtype=dtype)
# force usage of FMM
pot2 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=dipstr, dipvec=dipvec, backend='FMM', dtype=dtype)
def test3():
    # no matrix form here: numba vs FMM only
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
# Laplace Kernel, charge + dipstr, but one a scalar multiple of the other
# force usage of numba
pot1 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=0.5*charge, dipvec=dipvec, backend='numba', dtype=dtype)
# force usage of FMM
pot2 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=0.5*charge, dipvec=dipvec, backend='FMM', dtype=dtype)
# form the matrix; chweight/dpweight fold the 1.0/0.5 scaling into one matrix
MAT = Laplace_Kernel_Form(source, target, ifcharge=True, chweight=1.0, ifdipole=True, dpweight=0.5, dipvec=dipvec)
pot3 = MAT.dot(charge)
def test4():
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
############################################################################
# With gradients now
# NOTE(review): test5-test8 read pot*/gradx*/grady* as module globals at call
# time; the complex sections below rebind these names, so under pytest these
# tests actually assert on the last-assigned values — TODO confirm intended.
# Laplace Kernel, charge only
# force usage of numba
pot1, gradx1, grady1 = Laplace_Kernel_Apply(source, target, charge=charge, backend='numba', gradient=True, dtype=dtype)
# force usage of FMM
pot2, gradx2, grady2 = Laplace_Kernel_Apply(source, target, charge=charge, backend='FMM', gradient=True, dtype=dtype)
# form the matrix (gradient=True also returns d/dx and d/dy matrices)
MAT, MATX, MATY = Laplace_Kernel_Form(source, target, ifcharge=True, gradient=True)
pot3 = MAT.dot(charge)
gradx3 = MATX.dot(charge)
grady3 = MATY.dot(charge)
def test5():
    # potential and both gradient components must agree across all backends
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx2, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx3, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady2, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady3, atol=0.0, rtol=1e-8)
# Laplace Kernel, dipole only
# force usage of numba
pot1, gradx1, grady1 = Laplace_Kernel_Apply(source, target, dipstr=dipstr, dipvec=dipvec, backend='numba', gradient=True, dtype=dtype)
# force usage of FMM
pot2, gradx2, grady2 = Laplace_Kernel_Apply(source, target, dipstr=dipstr, dipvec=dipvec, backend='FMM', gradient=True, dtype=dtype)
# form the matrix
MAT, MATX, MATY = Laplace_Kernel_Form(source, target, ifdipole=True, dipvec=dipvec, gradient=True)
pot3 = MAT.dot(dipstr)
gradx3 = MATX.dot(dipstr)
grady3 = MATY.dot(dipstr)
def test6():
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx2, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx3, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady2, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady3, atol=0.0, rtol=1e-8)
# Laplace Kernel, charge + dipstr
# force usage of numba
pot1, gradx1, grady1 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=dipstr, dipvec=dipvec, backend='numba', gradient=True, dtype=dtype)
# force usage of FMM
pot2, gradx2, grady2 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=dipstr, dipvec=dipvec, backend='FMM', gradient=True, dtype=dtype)
def test7():
    # no matrix form here: numba vs FMM only
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx2, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady2, atol=0.0, rtol=1e-8)
# Laplace Kernel, charge + dipstr, but one a scalar multiple of the other
# force usage of numba
pot1, gradx1, grady1 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=0.5*charge, dipvec=dipvec, backend='numba', gradient=True, dtype=dtype)
# force usage of FMM
pot2, gradx2, grady2 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=0.5*charge, dipvec=dipvec, backend='FMM', gradient=True, dtype=dtype)
# form the matrix; chweight/dpweight fold the 1.0/0.5 scaling into one matrix
MAT, MATX, MATY = Laplace_Kernel_Form(source, target, ifcharge=True, chweight=1.0, ifdipole=True, dpweight=0.5, dipvec=dipvec, gradient=True)
pot3 = MAT.dot(charge)
gradx3 = MATX.dot(charge)
grady3 = MATY.dot(charge)
def test8():
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx2, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx3, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady2, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady3, atol=0.0, rtol=1e-8)
################################################################################
# Complex ######################################################################
################################################################################
dtype = complex
# fresh geometry (still real) but complex-valued densities for this half
source = get_random((2, ns), float)
target = get_random((2, nt), float)
dipvec = get_random((2, ns), float)
charge = get_random((ns,), dtype)
dipstr = get_random((ns,), dtype)
############################################################################
# Without gradients
# NOTE(review): test9-test12 read pot1/pot2/pot3 as module globals at call
# time; the gradient sections below rebind these names, so under pytest these
# tests actually assert on the last-assigned values — TODO confirm intended.
# Laplace Kernel, charge only
# force usage of numba
pot1 = Laplace_Kernel_Apply(source, target, charge=charge, backend='numba', dtype=dtype)
# force usage of FMM
pot2 = Laplace_Kernel_Apply(source, target, charge=charge, backend='FMM', dtype=dtype)
# form the matrix
MAT = Laplace_Kernel_Form(source, target, ifcharge=True)
pot3 = MAT.dot(charge)
def test9():
    # numba, FMM, and dense-matrix evaluations must agree to 1e-8 relative
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
# Laplace Kernel, dipole only
# force usage of numba
pot1 = Laplace_Kernel_Apply(source, target, dipstr=dipstr, dipvec=dipvec, backend='numba', dtype=dtype)
# force usage of FMM
pot2 = Laplace_Kernel_Apply(source, target, dipstr=dipstr, dipvec=dipvec, backend='FMM', dtype=dtype)
# form the matrix
MAT = Laplace_Kernel_Form(source, target, ifdipole=True, dipvec=dipvec)
pot3 = MAT.dot(dipstr)
def test10():
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
# Laplace Kernel, charge + dipstr
# force usage of numba
pot1 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=dipstr, dipvec=dipvec, backend='numba', dtype=dtype)
# force usage of FMM
pot2 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=dipstr, dipvec=dipvec, backend='FMM', dtype=dtype)
def test11():
    # no matrix form here: numba vs FMM only
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
# Laplace Kernel, charge + dipstr, but one a scalar multiple of the other
# force usage of numba
pot1 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=0.5*charge, dipvec=dipvec, backend='numba', dtype=dtype)
# force usage of FMM
pot2 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=0.5*charge, dipvec=dipvec, backend='FMM', dtype=dtype)
# form the matrix; chweight/dpweight fold the 1.0/0.5 scaling into one matrix
MAT = Laplace_Kernel_Form(source, target, ifcharge=True, chweight=1.0, ifdipole=True, dpweight=0.5, dipvec=dipvec)
pot3 = MAT.dot(charge)
def test12():
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
############################################################################
# With gradients now
# NOTE(review): these tests read module globals at call time; since this is
# the last section of the module, test13-test16 are the only tests guaranteed
# to see the values computed directly above them.
# Laplace Kernel, charge only
# force usage of numba
pot1, gradx1, grady1 = Laplace_Kernel_Apply(source, target, charge=charge, backend='numba', gradient=True, dtype=dtype)
# force usage of FMM
pot2, gradx2, grady2 = Laplace_Kernel_Apply(source, target, charge=charge, backend='FMM', gradient=True, dtype=dtype)
# form the matrix (gradient=True also returns d/dx and d/dy matrices)
MAT, MATX, MATY = Laplace_Kernel_Form(source, target, ifcharge=True, gradient=True)
pot3 = MAT.dot(charge)
gradx3 = MATX.dot(charge)
grady3 = MATY.dot(charge)
def test13():
    # potential and both gradient components must agree across all backends
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx2, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx3, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady2, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady3, atol=0.0, rtol=1e-8)
# Laplace Kernel, dipole only
# force usage of numba
pot1, gradx1, grady1 = Laplace_Kernel_Apply(source, target, dipstr=dipstr, dipvec=dipvec, backend='numba', gradient=True, dtype=dtype)
# force usage of FMM
pot2, gradx2, grady2 = Laplace_Kernel_Apply(source, target, dipstr=dipstr, dipvec=dipvec, backend='FMM', gradient=True, dtype=dtype)
# form the matrix
MAT, MATX, MATY = Laplace_Kernel_Form(source, target, ifdipole=True, dipvec=dipvec, gradient=True)
pot3 = MAT.dot(dipstr)
gradx3 = MATX.dot(dipstr)
grady3 = MATY.dot(dipstr)
def test14():
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx2, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx3, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady2, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady3, atol=0.0, rtol=1e-8)
# Laplace Kernel, charge + dipstr
# force usage of numba
pot1, gradx1, grady1 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=dipstr, dipvec=dipvec, backend='numba', gradient=True, dtype=dtype)
# force usage of FMM
pot2, gradx2, grady2 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=dipstr, dipvec=dipvec, backend='FMM', gradient=True, dtype=dtype)
def test15():
    # no matrix form here: numba vs FMM only
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx2, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady2, atol=0.0, rtol=1e-8)
# Laplace Kernel, charge + dipstr, but one a scalar multiple of the other
# force usage of numba
pot1, gradx1, grady1 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=0.5*charge, dipvec=dipvec, backend='numba', gradient=True, dtype=dtype)
# force usage of FMM
pot2, gradx2, grady2 = Laplace_Kernel_Apply(source, target, charge=charge, dipstr=0.5*charge, dipvec=dipvec, backend='FMM', gradient=True, dtype=dtype)
# form the matrix; chweight/dpweight fold the 1.0/0.5 scaling into one matrix
MAT, MATX, MATY = Laplace_Kernel_Form(source, target, ifcharge=True, chweight=1.0, ifdipole=True, dpweight=0.5, dipvec=dipvec, gradient=True)
pot3 = MAT.dot(charge)
gradx3 = MATX.dot(charge)
grady3 = MATY.dot(charge)
def test16():
    assert np.allclose(pot1, pot2, atol=0.0, rtol=1e-8)
    assert np.allclose(pot1, pot3, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx2, atol=0.0, rtol=1e-8)
    assert np.allclose(gradx1, gradx3, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady2, atol=0.0, rtol=1e-8)
    assert np.allclose(grady1, grady3, atol=0.0, rtol=1e-8)
| 43.251748
| 153
| 0.685287
| 1,843
| 12,370
| 4.543679
| 0.05643
| 0.097803
| 0.106998
| 0.066874
| 0.95701
| 0.95701
| 0.95701
| 0.95701
| 0.95701
| 0.95701
| 0
| 0.045597
| 0.124171
| 12,370
| 285
| 154
| 43.403509
| 0.72734
| 0.126192
| 0
| 0.82716
| 0
| 0
| 0.012919
| 0
| 0
| 0
| 0
| 0
| 0.345679
| 1
| 0.104938
| false
| 0
| 0.018519
| 0
| 0.12963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b1b5b993aa75ceec9ade92634f81442efead7d89
| 73,707
|
py
|
Python
|
embyapi/api/user_activity_api_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
embyapi/api/user_activity_api_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
embyapi/api/user_activity_api_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
"""
Emby Server API
Explore the Emby Server API # noqa: E501
OpenAPI spec version: 4.1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from embyapi.api_client import ApiClient
class UserActivityAPIApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_user_usage_stats_by_breakdowntype_breakdownreport(self, breakdown_type, **kwargs): # noqa: E501
"""Gets a breakdown of a usage metric # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_by_breakdowntype_breakdownreport(breakdown_type, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str breakdown_type: Breakdown type (required)
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_by_breakdowntype_breakdownreport_with_http_info(breakdown_type, **kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_by_breakdowntype_breakdownreport_with_http_info(breakdown_type, **kwargs) # noqa: E501
return data
    def get_user_usage_stats_by_breakdowntype_breakdownreport_with_http_info(self, breakdown_type, **kwargs):  # noqa: E501
        """Gets a breakdown of a usage metric  # noqa: E501
        No authentication required  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_user_usage_stats_by_breakdowntype_breakdownreport_with_http_info(breakdown_type, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str breakdown_type: Breakdown type (required)
        :param int days: Number of Days
        :param str end_date: End date of the report in yyyy-MM-dd format
        :return: object
        If the method is called asynchronously,
        returns the request thread.
        """
        # (generated code) whitelist of keyword arguments callers may pass
        all_params = ['breakdown_type', 'days', 'end_date']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshot: relies on exactly {self, breakdown_type, kwargs,
        # all_params} being bound here — do not introduce new locals above.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_user_usage_stats_by_breakdowntype_breakdownreport" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'breakdown_type' is set
        if ('breakdown_type' not in params or
                params['breakdown_type'] is None):
            raise ValueError("Missing the required parameter `breakdown_type` when calling `get_user_usage_stats_by_breakdowntype_breakdownreport`")  # noqa: E501
        collection_formats = {}
        # path/query parameter assembly for GET {BreakdownType}/BreakdownReport
        path_params = {}
        if 'breakdown_type' in params:
            path_params['BreakdownType'] = params['breakdown_type']  # noqa: E501
        query_params = []
        if 'days' in params:
            query_params.append(('days', params['days']))  # noqa: E501
        if 'end_date' in params:
            query_params.append(('end_date', params['end_date']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501
        # delegate the actual HTTP work to the shared ApiClient
        return self.api_client.call_api(
            '/user_usage_stats/{BreakdownType}/BreakdownReport', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_user_usage_stats_by_userid_by_date_getitems(self, user_id, _date, **kwargs): # noqa: E501
"""Gets activity for {USER} for {Date} formatted as yyyy-MM-dd # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_by_userid_by_date_getitems(user_id, _date, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str user_id: User Id (required)
:param str _date: UTC DateTime, Format yyyy-MM-dd (required)
:param str filter: Comma separated list of media types to filter (movies,series)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_by_userid_by_date_getitems_with_http_info(user_id, _date, **kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_by_userid_by_date_getitems_with_http_info(user_id, _date, **kwargs) # noqa: E501
return data
    def get_user_usage_stats_by_userid_by_date_getitems_with_http_info(self, user_id, _date, **kwargs):  # noqa: E501
        """Gets activity for {USER} for {Date} formatted as yyyy-MM-dd  # noqa: E501
        No authentication required  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_user_usage_stats_by_userid_by_date_getitems_with_http_info(user_id, _date, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str user_id: User Id (required)
        :param str _date: UTC DateTime, Format yyyy-MM-dd (required)
        :param str filter: Comma separated list of media types to filter (movies,series)
        :return: object
        If the method is called asynchronously,
        returns the request thread.
        """
        # (generated code) whitelist of keyword arguments callers may pass
        all_params = ['user_id', '_date', 'filter']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshot: relies on exactly the parameters and all_params
        # being bound here — do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_user_usage_stats_by_userid_by_date_getitems" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params or
                params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `get_user_usage_stats_by_userid_by_date_getitems`")  # noqa: E501
        # verify the required parameter '_date' is set
        if ('_date' not in params or
                params['_date'] is None):
            raise ValueError("Missing the required parameter `_date` when calling `get_user_usage_stats_by_userid_by_date_getitems`")  # noqa: E501
        collection_formats = {}
        # path/query parameter assembly; note the query key is capitalized
        # 'Filter' while the Python-side name is 'filter'
        path_params = {}
        if 'user_id' in params:
            path_params['UserID'] = params['user_id']  # noqa: E501
        if '_date' in params:
            path_params['Date'] = params['_date']  # noqa: E501
        query_params = []
        if 'filter' in params:
            query_params.append(('Filter', params['filter']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501
        # delegate the actual HTTP work to the shared ApiClient
        return self.api_client.call_api(
            '/user_usage_stats/{UserID}/{Date}/GetItems', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_user_usage_stats_durationhistogramreport(self, **kwargs): # noqa: E501
"""Gets duration histogram # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_durationhistogramreport(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:param str filter: Comma separated list of media types to filter (movies,series)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_durationhistogramreport_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_durationhistogramreport_with_http_info(**kwargs) # noqa: E501
return data
    def get_user_usage_stats_durationhistogramreport_with_http_info(self, **kwargs):  # noqa: E501
        """Gets duration histogram  # noqa: E501
        No authentication required  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_user_usage_stats_durationhistogramreport_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param int days: Number of Days
        :param str end_date: End date of the report in yyyy-MM-dd format
        :param str filter: Comma separated list of media types to filter (movies,series)
        :return: object
        If the method is called asynchronously,
        returns the request thread.
        """
        # (generated code) whitelist of keyword arguments callers may pass
        all_params = ['days', 'end_date', 'filter']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshot: relies on exactly {self, kwargs, all_params}
        # being bound here — do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_user_usage_stats_durationhistogramreport" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        # no path parameters; all inputs go on the query string
        path_params = {}
        query_params = []
        if 'days' in params:
            query_params.append(('days', params['days']))  # noqa: E501
        if 'end_date' in params:
            query_params.append(('end_date', params['end_date']))  # noqa: E501
        if 'filter' in params:
            query_params.append(('filter', params['filter']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501
        # delegate the actual HTTP work to the shared ApiClient
        return self.api_client.call_api(
            '/user_usage_stats/DurationHistogramReport', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_user_usage_stats_hourlyreport(self, **kwargs): # noqa: E501
"""Gets a report of the available activity per hour # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_hourlyreport(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:param str filter: Comma separated list of media types to filter (movies,series)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_hourlyreport_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_hourlyreport_with_http_info(**kwargs) # noqa: E501
return data
    def get_user_usage_stats_hourlyreport_with_http_info(self, **kwargs):  # noqa: E501
        """Gets a report of the available activity per hour  # noqa: E501
        No authentication required  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_user_usage_stats_hourlyreport_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param int days: Number of Days
        :param str end_date: End date of the report in yyyy-MM-dd format
        :param str filter: Comma separated list of media types to filter (movies,series)
        :return: object
        If the method is called asynchronously,
        returns the request thread.
        """
        # (generated code) whitelist of keyword arguments callers may pass
        all_params = ['days', 'end_date', 'filter']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshot: relies on exactly {self, kwargs, all_params}
        # being bound here — do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_user_usage_stats_hourlyreport" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        # no path parameters; all inputs go on the query string
        path_params = {}
        query_params = []
        if 'days' in params:
            query_params.append(('days', params['days']))  # noqa: E501
        if 'end_date' in params:
            query_params.append(('end_date', params['end_date']))  # noqa: E501
        if 'filter' in params:
            query_params.append(('filter', params['filter']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501
        # delegate the actual HTTP work to the shared ApiClient
        return self.api_client.call_api(
            '/user_usage_stats/HourlyReport', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_user_usage_stats_load_backup(self, backupfile, **kwargs): # noqa: E501
"""Loads a backup from a file # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_load_backup(backupfile, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str backupfile: File name of file to load (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_load_backup_with_http_info(backupfile, **kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_load_backup_with_http_info(backupfile, **kwargs) # noqa: E501
return data
    def get_user_usage_stats_load_backup_with_http_info(self, backupfile, **kwargs):  # noqa: E501
        """Loads a backup from a file  # noqa: E501
        No authentication required  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_user_usage_stats_load_backup_with_http_info(backupfile, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str backupfile: File name of file to load (required)
        :return: object
        If the method is called asynchronously,
        returns the request thread.
        """
        # (generated code) whitelist of keyword arguments callers may pass
        all_params = ['backupfile']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshot: relies on exactly {self, backupfile, kwargs,
        # all_params} being bound here — do not introduce new locals above.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_user_usage_stats_load_backup" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'backupfile' is set
        if ('backupfile' not in params or
                params['backupfile'] is None):
            raise ValueError("Missing the required parameter `backupfile` when calling `get_user_usage_stats_load_backup`")  # noqa: E501
        collection_formats = {}
        # backupfile travels on the query string, not the path
        path_params = {}
        query_params = []
        if 'backupfile' in params:
            query_params.append(('backupfile', params['backupfile']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501
        # delegate the actual HTTP work to the shared ApiClient
        return self.api_client.call_api(
            '/user_usage_stats/load_backup', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_user_usage_stats_moviesreport(self, **kwargs): # noqa: E501
"""Gets Movies counts # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_moviesreport(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_moviesreport_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_moviesreport_with_http_info(**kwargs) # noqa: E501
return data
def get_user_usage_stats_moviesreport_with_http_info(self, **kwargs): # noqa: E501
"""Gets Movies counts # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_moviesreport_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['days', 'end_date'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_moviesreport" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'days' in params:
query_params.append(('days', params['days'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/MoviesReport', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_playactivity(self, **kwargs): # noqa: E501
"""Gets play activity for number of days # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_playactivity(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:param str filter: Comma separated list of media types to filter (movies,series)
:param str data_type: Data type to return (count,time)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_playactivity_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_playactivity_with_http_info(**kwargs) # noqa: E501
return data
def get_user_usage_stats_playactivity_with_http_info(self, **kwargs): # noqa: E501
"""Gets play activity for number of days # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_playactivity_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:param str filter: Comma separated list of media types to filter (movies,series)
:param str data_type: Data type to return (count,time)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['days', 'end_date', 'filter', 'data_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_playactivity" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'days' in params:
query_params.append(('days', params['days'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'data_type' in params:
query_params.append(('data_type', params['data_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/PlayActivity', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_process_list(self, **kwargs): # noqa: E501
"""Gets a list of process Info # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_process_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_process_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_process_list_with_http_info(**kwargs) # noqa: E501
return data
def get_user_usage_stats_process_list_with_http_info(self, **kwargs): # noqa: E501
"""Gets a list of process Info # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_process_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_process_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/process_list', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_resource_usage(self, **kwargs): # noqa: E501
"""Gets Resource Usage Info # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_resource_usage(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int hours: Number of Hours
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_resource_usage_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_resource_usage_with_http_info(**kwargs) # noqa: E501
return data
def get_user_usage_stats_resource_usage_with_http_info(self, **kwargs): # noqa: E501
"""Gets Resource Usage Info # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_resource_usage_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int hours: Number of Hours
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['hours'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_resource_usage" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'hours' in params:
query_params.append(('hours', params['hours'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/resource_usage', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_save_backup(self, **kwargs): # noqa: E501
"""Saves a backup of the playback report data to the backup path # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_save_backup(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_save_backup_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_save_backup_with_http_info(**kwargs) # noqa: E501
return data
def get_user_usage_stats_save_backup_with_http_info(self, **kwargs): # noqa: E501
"""Saves a backup of the playback report data to the backup path # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_save_backup_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_save_backup" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/save_backup', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_session_list(self, **kwargs): # noqa: E501
"""Gets Session Info # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_session_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_session_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_session_list_with_http_info(**kwargs) # noqa: E501
return data
def get_user_usage_stats_session_list_with_http_info(self, **kwargs): # noqa: E501
"""Gets Session Info # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_session_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_session_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/session_list', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_tvshowsreport(self, **kwargs): # noqa: E501
"""Gets TV Shows counts # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_tvshowsreport(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_tvshowsreport_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_tvshowsreport_with_http_info(**kwargs) # noqa: E501
return data
def get_user_usage_stats_tvshowsreport_with_http_info(self, **kwargs): # noqa: E501
"""Gets TV Shows counts # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_tvshowsreport_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['days', 'end_date'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_tvshowsreport" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'days' in params:
query_params.append(('days', params['days'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/TvShowsReport', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_type_filter_list(self, **kwargs): # noqa: E501
"""Gets types filter list items # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_type_filter_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_type_filter_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_type_filter_list_with_http_info(**kwargs) # noqa: E501
return data
def get_user_usage_stats_type_filter_list_with_http_info(self, **kwargs): # noqa: E501
"""Gets types filter list items # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_type_filter_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_type_filter_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/type_filter_list', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_user_activity(self, **kwargs): # noqa: E501
"""Gets a report of the available activity per hour # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_user_activity(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_user_activity_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_user_activity_with_http_info(**kwargs) # noqa: E501
return data
def get_user_usage_stats_user_activity_with_http_info(self, **kwargs): # noqa: E501
"""Gets a report of the available activity per hour # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_user_activity_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['days', 'end_date'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_user_activity" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'days' in params:
query_params.append(('days', params['days'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/user_activity', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_user_list(self, **kwargs): # noqa: E501
"""Get users # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_user_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_user_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_user_list_with_http_info(**kwargs) # noqa: E501
return data
def get_user_usage_stats_user_list_with_http_info(self, **kwargs): # noqa: E501
"""Get users # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_user_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_user_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/user_list', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_user_manage_by_action_by_id(self, action, id, **kwargs): # noqa: E501
"""Get users # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_user_manage_by_action_by_id(action, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str action: action to perform (required)
:param str id: user Id to perform the action on (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_user_manage_by_action_by_id_with_http_info(action, id, **kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_user_manage_by_action_by_id_with_http_info(action, id, **kwargs) # noqa: E501
return data
def get_user_usage_stats_user_manage_by_action_by_id_with_http_info(self, action, id, **kwargs): # noqa: E501
"""Get users # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_user_manage_by_action_by_id_with_http_info(action, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str action: action to perform (required)
:param str id: user Id to perform the action on (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['action', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_usage_stats_user_manage_by_action_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'action' is set
if ('action' not in params or
params['action'] is None):
raise ValueError("Missing the required parameter `action` when calling `get_user_usage_stats_user_manage_by_action_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_user_usage_stats_user_manage_by_action_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'action' in params:
path_params['Action'] = params['action'] # noqa: E501
if 'id' in params:
path_params['Id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/user_usage_stats/user_manage/{Action}/{Id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_usage_stats_userplaylist(self, user_id, **kwargs): # noqa: E501
"""Gets a report of all played items for a user in a date period # noqa: E501
No authentication required # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_usage_stats_userplaylist(user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str user_id: User Id (required)
:param int days: Number of Days
:param str end_date: End date of the report in yyyy-MM-dd format
:param str filter: Comma separated list of media types to filter (movies,series)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_usage_stats_userplaylist_with_http_info(user_id, **kwargs) # noqa: E501
else:
(data) = self.get_user_usage_stats_userplaylist_with_http_info(user_id, **kwargs) # noqa: E501
return data
def get_user_usage_stats_userplaylist_with_http_info(self, user_id, **kwargs):  # noqa: E501
    """Gets a report of all played items for a user in a date period.

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to obtain the request thread instead.

    :param async_req bool
    :param str user_id: User Id (required)
    :param int days: Number of Days
    :param str end_date: End date of the report in yyyy-MM-dd format
    :param str filter: Comma separated list of media types to filter (movies,series)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'user_id', 'days', 'end_date', 'filter',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = locals()
    # Fold keyword arguments into `params`, rejecting unknown names early.
    for name, value in params['kwargs'].items():
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user_usage_stats_userplaylist" % name
            )
        params[name] = value
    del params['kwargs']

    # verify the required parameter 'user_id' is set
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `get_user_usage_stats_userplaylist`")  # noqa: E501

    # Every argument travels in the query string for this endpoint.
    query_params = [
        (name, params[name])
        for name in ('user_id', 'days', 'end_date', 'filter')
        if name in params
    ]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),
    }

    return self.api_client.call_api(
        '/user_usage_stats/UserPlaylist', 'GET',
        {},                       # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=[],         # no authentication required
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def post_user_usage_stats_import_backup(self, body, **kwargs):  # noqa: E501
    """Post a backup for importing.

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :param Object body: Binary stream (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted from the lower-level call.
    kwargs['_return_http_data_only'] = True
    # The worker returns either the data or (async) the request thread,
    # so a single forwarding call covers both modes.
    return self.post_user_usage_stats_import_backup_with_http_info(body, **kwargs)  # noqa: E501
def post_user_usage_stats_import_backup_with_http_info(self, body, **kwargs):  # noqa: E501
    """Post a backup for importing.

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to obtain the request thread instead.

    :param async_req bool
    :param Object body: Binary stream (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = locals()
    # Fold keyword arguments into `params`, rejecting unknown names early.
    for name, value in params['kwargs'].items():
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_user_usage_stats_import_backup" % name
            )
        params[name] = value
    del params['kwargs']

    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `post_user_usage_stats_import_backup`")  # noqa: E501

    # Raw binary upload: the backup goes in the request body as-is.
    header_params = {
        'Content-Type': self.api_client.select_header_content_type(
            ['application/octet-stream']),
    }

    return self.api_client.call_api(
        '/user_usage_stats/import_backup', 'POST',
        {},                       # no path parameters
        [],                       # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type=None,       # endpoint returns nothing
        auth_settings=[],         # no authentication required
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def post_user_usage_stats_submit_custom_query(self, body, **kwargs):  # noqa: E501
    """Submit an SQL query.

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :param PlaybackReportingApiCustomQuery body: CustomQuery (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted from the lower-level call.
    kwargs['_return_http_data_only'] = True
    # The worker handles both sync and async modes; forward unchanged.
    return self.post_user_usage_stats_submit_custom_query_with_http_info(body, **kwargs)  # noqa: E501
def post_user_usage_stats_submit_custom_query_with_http_info(self, body, **kwargs):  # noqa: E501
    """Submit an SQL query.

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to obtain the request thread instead.

    :param async_req bool
    :param PlaybackReportingApiCustomQuery body: CustomQuery (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = locals()
    # Fold keyword arguments into `params`, rejecting unknown names early.
    for name, value in params['kwargs'].items():
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_user_usage_stats_submit_custom_query" % name
            )
        params[name] = value
    del params['kwargs']

    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `post_user_usage_stats_submit_custom_query`")  # noqa: E501

    # JSON/XML in, JSON/XML out; the query object is the request body.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/xml']),
    }

    return self.api_client.call_api(
        '/user_usage_stats/submit_custom_query', 'POST',
        {},                       # no path parameters
        [],                       # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=[],         # no authentication required
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 39.734232
| 162
| 0.616576
| 8,704
| 73,707
| 4.926241
| 0.02608
| 0.050562
| 0.052568
| 0.049956
| 0.976095
| 0.96658
| 0.958627
| 0.949718
| 0.939853
| 0.9319
| 0
| 0.015879
| 0.29681
| 73,707
| 1,854
| 163
| 39.755663
| 0.811403
| 0.327608
| 0
| 0.81206
| 1
| 0
| 0.180171
| 0.0663
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039196
| false
| 0
| 0.011055
| 0
| 0.108543
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b1c2576ebd086538618301156dc9d540e452b6c7
| 195
|
py
|
Python
|
clickpy/__main__.py
|
fitzlang/clickpy
|
5da15e3f42eea3a75c667546134e307ac853edbc
|
[
"MIT"
] | null | null | null |
clickpy/__main__.py
|
fitzlang/clickpy
|
5da15e3f42eea3a75c667546134e307ac853edbc
|
[
"MIT"
] | 7
|
2021-08-04T17:05:09.000Z
|
2021-08-05T04:30:23.000Z
|
clickpy/__main__.py
|
langnostic/clickpy
|
5da15e3f42eea3a75c667546134e307ac853edbc
|
[
"MIT"
] | null | null | null |
"""Clickpy, Automated mouse clicking script.""" # pragma: no cover
if __name__ == "__main__": # pragma: no cover
from clickpy import run # pragma: no cover
run() # pragma: no cover
| 27.857143
| 67
| 0.65641
| 25
| 195
| 4.8
| 0.56
| 0.266667
| 0.433333
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225641
| 195
| 6
| 68
| 32.5
| 0.794702
| 0.564103
| 0
| 0
| 0
| 0
| 0.105263
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
5922b34f5d68f476b3cce86e51a45e9a1922b94f
| 16,540
|
py
|
Python
|
script/MidFlexMatchNeutralityPlot.py
|
mmore500/tag-olympics
|
23f89aa278b2866220696eddb89ecaa860e2d778
|
[
"MIT"
] | null | null | null |
script/MidFlexMatchNeutralityPlot.py
|
mmore500/tag-olympics
|
23f89aa278b2866220696eddb89ecaa860e2d778
|
[
"MIT"
] | null | null | null |
script/MidFlexMatchNeutralityPlot.py
|
mmore500/tag-olympics
|
23f89aa278b2866220696eddb89ecaa860e2d778
|
[
"MIT"
] | null | null | null |
# matplotlib must select the non-interactive Agg backend *before*
# pyplot is imported anywhere.
import matplotlib
matplotlib.use('Agg')

# Deduplicated: seaborn, pandas, and keyname were each imported twice.
import itertools
import sys

import numpy as np
import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt

from fileshash import fileshash as fsh
from keyname import keyname as kn

# open-type fonts
matplotlib.rcParams['pdf.fonttype'] = 42

# argv[1]: key CSV mapping metric slugs to their metadata;
# argv[2:]: one dataframe CSV per replicate.
df_key = pd.read_csv(sys.argv[1])
dataframe_filenames = sys.argv[2:]
dfs = [
    (filename, pd.read_csv(filename))
    for filename in dataframe_filenames
]
print("Data loaded!")
# Fold every per-replicate dataframe into one long frame, tagging each
# row with the metadata packed into its source filename.
res = []
for filename, df in dfs:
    # assumes datafiles are each from one replicate
    for k, v in kn.unpack(filename).items():
        df[k] = v
    res.append(df)
df_data = pd.concat(res)
df_data['Slug'] = df_data['metric-slug']

# Lookup table: metric slug -> every other column of the key file.
# Series.iteritems() was removed in pandas 2.0; items() is the
# long-supported equivalent and yields the same (label, value) pairs.
key = {
    row['Slug']: {
        col: val
        for col, val in row.items() if col != 'Slug'
    }
    for _, row in df_key.iterrows()
}

# Straight slug -> attribute lookups (previously four copy-pasted
# apply calls).  `attr=attr` pins the loop variable at definition time
# to avoid the late-binding-closure trap.
for attr in ('Metric', 'Dimension', 'Dimension Type', 'Inverse'):
    df_data[attr] = df_data.apply(
        lambda x, attr=attr: key[x['Slug']][attr],
        axis=1
    )

# Rewrite Metric as the base metric name, keeping a 'Started ' prefix
# when present.  Reads the Metric column assigned just above, so this
# must stay after the loop.
df_data['Metric'] = df_data.apply(
    lambda x: (
        ('Started ' if 'Started' in x['Metric'] else '')
        + key[x['Slug']]['Base Metric']
    ),
    axis=1
)
df_data['Treatment'] = df_data['treatment']
df_data['Dimension Count'] = df_data['Dimension']
df_data['Dimension'] = df_data.apply(
    lambda x: x['Dimension Type'] + " " + str(x['Dimension']),
    axis=1
)
df_data['Target Size'] = df_data['target-size']
df_data['Target Configuration'] = df_data['target-config']
print("Data crunched!")
# Median genetic distance vs. updates elapsed: one panel per target
# configuration/size, one line style + color per metric.
plt.clf()

metric_order = list(df_data['Metric'].unique())
distance_update_df = df_data[
    df_data['Statistic'] == 'Median'
].pivot_table(
    index=['Step', 'Metric', 'seed', 'Target Configuration', 'Target Size'],
    columns='Measure',
    values='Value',
    aggfunc='first'
).reset_index()

g = sns.FacetGrid(
    data=distance_update_df,
    col='Target Configuration',
    row='Target Size',
    hue='Metric',
    hue_kws={
        'ls': list(itertools.islice(
            itertools.cycle(['-', '--', '-.', ':']),
            len(metric_order)
        )),
        'color': sns.color_palette()
    },
    margin_titles=True
).set(ylim=(0, 0.5))
g.map(
    sns.lineplot,
    'Updates Elapsed',
    'Genetic Distance',
    style_order=metric_order
).add_legend()

# All inputs must belong to a single experiment / fitness function.
assert len({kn.unpack(f)['experiment'] for f in dataframe_filenames}) == 1
assert len({kn.unpack(f)['fit-fun'] for f in dataframe_filenames}) == 1

outfile = kn.pack({
    'experiment': kn.unpack(dataframe_filenames[0])['experiment'],
    'fit-fun': kn.unpack(dataframe_filenames[0])['fit-fun'],
    'viz': 'neutrality-distance-update',
    '_data_hathash_hash': fsh.FilesHash().hash_files(dataframe_filenames),
    '_script_fullcat_hash': fsh.FilesHash(
        file_parcel="full_parcel",
        files_join="cat_join"
    ).hash_files([sys.argv[0]]),
    'ext': '.pdf'
})
plt.savefig(outfile, transparent=True, bbox_inches='tight', pad_inches=0)
print("output saved to", outfile)
# Median genetic distance as a function of the fixed mutational step.
plt.clf()

metric_order = list(df_data['Metric'].unique())
step_distance_df = df_data[
    (df_data['Measure'] == 'Genetic Distance')
    & (df_data['Statistic'] == 'Median')
]

g = sns.FacetGrid(
    step_distance_df,
    col='Target Configuration',
    row='Target Size',
    hue='Metric',
    hue_kws={
        'ls': list(itertools.islice(
            itertools.cycle(['-', '--', '-.', ':']),
            len(metric_order)
        )),
        'color': sns.color_palette()
    },
    margin_titles=True
)
g.map(
    sns.lineplot,
    'Step',
    'Value',
    style_order=metric_order
).add_legend().set_ylabels(
    "Genetic Distance"
).set_xlabels(
    "Fixed Mutational Step"
)

# All inputs must belong to a single experiment / fitness function.
assert len({kn.unpack(f)['experiment'] for f in dataframe_filenames}) == 1
assert len({kn.unpack(f)['fit-fun'] for f in dataframe_filenames}) == 1

outfile = kn.pack({
    'experiment': kn.unpack(dataframe_filenames[0])['experiment'],
    'fit-fun': kn.unpack(dataframe_filenames[0])['fit-fun'],
    'viz': 'neutrality-step-distance',
    '_data_hathash_hash': fsh.FilesHash().hash_files(dataframe_filenames),
    '_script_fullcat_hash': fsh.FilesHash(
        file_parcel="full_parcel",
        files_join="cat_join"
    ).hash_files([sys.argv[0]]),
    'ext': '.pdf'
})
plt.savefig(outfile, transparent=True, bbox_inches='tight', pad_inches=0)
print("output saved to", outfile)
# Median updates elapsed as a function of the fixed mutational step.
# (No plt.clf() here in the original; FacetGrid opens its own figure.)
metric_order = list(df_data['Metric'].unique())
step_update_df = df_data[
    (df_data['Measure'] == 'Updates Elapsed')
    & (df_data['Statistic'] == 'Median')
]

g = sns.FacetGrid(
    step_update_df,
    col='Target Configuration',
    row='Target Size',
    hue='Metric',
    hue_kws={
        'ls': list(itertools.islice(
            itertools.cycle(['-', '--', '-.', ':']),
            len(metric_order)
        )),
        'color': sns.color_palette()
    },
    margin_titles=True
)
g.map(
    sns.lineplot,
    'Step',
    'Value',
    style_order=metric_order
).add_legend().set_ylabels(
    "Updates Elapsed"
).set_xlabels(
    "Fixed Mutational Step"
)

# All inputs must belong to a single experiment / fitness function.
assert len({kn.unpack(f)['experiment'] for f in dataframe_filenames}) == 1
assert len({kn.unpack(f)['fit-fun'] for f in dataframe_filenames}) == 1

outfile = kn.pack({
    'experiment': kn.unpack(dataframe_filenames[0])['experiment'],
    'fit-fun': kn.unpack(dataframe_filenames[0])['fit-fun'],
    'viz': 'neutrality-step-update',
    '_data_hathash_hash': fsh.FilesHash().hash_files(dataframe_filenames),
    '_script_fullcat_hash': fsh.FilesHash(
        file_parcel="full_parcel",
        files_join="cat_join"
    ).hash_files([sys.argv[0]]),
    'ext': '.pdf'
})
plt.savefig(outfile, transparent=True, bbox_inches='tight', pad_inches=0)
print("output saved to", outfile)
# Bar chart: per metric, the largest fixed-mutation step reached
# (max Step over each metric/seed/target cell).
plt.clf()

metric_order = list(df_data['Metric'].unique())
max_step_df = df_data[
    df_data['Statistic'] == 'Median'
].pivot_table(
    index=['Metric', 'seed', 'Target Configuration', 'Target Size'],
    values='Step',
    aggfunc='max'
).reset_index()

g = sns.FacetGrid(
    data=max_step_df,
    col='Target Configuration',
    row='Target Size',
    hue='Metric',
    hue_kws={
        'color': sns.color_palette()
    },
    margin_titles=True,
)
g.map(
    sns.barplot,
    'Metric',
    'Step',
    order=metric_order
).set_ylabels("Fixed Mutations")
g.set_xticklabels(rotation=-90)

# All inputs must belong to a single experiment / fitness function.
assert len({kn.unpack(f)['experiment'] for f in dataframe_filenames}) == 1
assert len({kn.unpack(f)['fit-fun'] for f in dataframe_filenames}) == 1

outfile = kn.pack({
    'experiment': kn.unpack(dataframe_filenames[0])['experiment'],
    'fit-fun': kn.unpack(dataframe_filenames[0])['fit-fun'],
    'viz': 'neutrality-step-bar',
    '_data_hathash_hash': fsh.FilesHash().hash_files(dataframe_filenames),
    '_script_fullcat_hash': fsh.FilesHash(
        file_parcel="full_parcel",
        files_join="cat_join"
    ).hash_files([sys.argv[0]]),
    'ext': '.pdf'
})
plt.savefig(outfile, transparent=True, bbox_inches='tight', pad_inches=0)
print("output saved to", outfile)
# NOTE(review): this section is a verbatim duplicate of the
# 'neutrality-step-bar' section immediately above -- it rebuilds the
# same figure and rewrites the same output file.  Looks like a
# copy-paste leftover; confirm with the author before removing.
plt.clf()
g = sns.FacetGrid(
data=df_data[
(df_data['Statistic'] == 'Median')
].pivot_table(
index=['Metric', 'seed', 'Target Configuration', 'Target Size'],
values='Step',
aggfunc='max'
).reset_index(),
col='Target Configuration',
row='Target Size',
hue='Metric',
hue_kws={
'color' : sns.color_palette()
},
margin_titles=True,
)
g.map(
sns.barplot,
'Metric',
'Step',
order=list(df_data['Metric'].unique())
).set_ylabels("Fixed Mutations")
g.set_xticklabels(rotation=-90)
assert len({kn.unpack(f)['experiment'] for f in dataframe_filenames}) == 1
assert len({kn.unpack(f)['fit-fun'] for f in dataframe_filenames}) == 1
outfile = kn.pack({
'experiment' : kn.unpack(dataframe_filenames[0])['experiment'],
'fit-fun' : kn.unpack(dataframe_filenames[0])['fit-fun'],
'viz' : 'neutrality-step-bar',
'_data_hathash_hash' : fsh.FilesHash().hash_files(dataframe_filenames),
'_script_fullcat_hash' : fsh.FilesHash(
file_parcel="full_parcel",
files_join="cat_join"
).hash_files([sys.argv[0]]),
# '_source_hash' :kn.unpack(dataframe_filename)['_source_hash'],
'ext' : '.pdf'
})
plt.savefig(
outfile,
transparent=True,
bbox_inches='tight',
pad_inches=0
)
print("output saved to", outfile)
# The five bar-chart sections below differed only in the Step snapshot,
# the measure plotted, and the output name -- render them with one loop.
# Iteration order matches the original script exactly, so the sequence
# of figures, saved files, and prints is unchanged.
for snapshot_step, bar_measure, viz_slug in [
    (500, 'Genetic Distance', 'neutrality-distance500-bar'),
    (1000, 'Genetic Distance', 'neutrality-distance1000-bar'),
    (1500, 'Genetic Distance', 'neutrality-distance1500-bar'),
    (2000, 'Genetic Distance', 'neutrality-distance2000-bar'),
    (1000, 'Updates Elapsed', 'neutrality-update1000-bar'),
]:
    plt.clf()
    # One row per metric/seed/target cell at the chosen Step snapshot,
    # with the Measure values spread into columns.
    g = sns.FacetGrid(
        data=df_data[
            (df_data['Statistic'] == 'Median')
            & (df_data['Step'] == snapshot_step)
        ].pivot_table(
            index=['Metric', 'seed', 'Target Configuration', 'Target Size'],
            columns='Measure',
            values='Value',
            aggfunc='first',
        ).reset_index(),
        col='Target Configuration',
        row='Target Size',
        hue='Metric',
        hue_kws={
            'color': sns.color_palette()
        },
        margin_titles=True,
    )
    g.map(
        sns.barplot,
        'Metric',
        bar_measure,
        order=list(df_data['Metric'].unique()),
    )
    g.set_xticklabels(rotation=-90)

    # All inputs must belong to a single experiment / fitness function.
    assert len({kn.unpack(f)['experiment'] for f in dataframe_filenames}) == 1
    assert len({kn.unpack(f)['fit-fun'] for f in dataframe_filenames}) == 1

    outfile = kn.pack({
        'experiment': kn.unpack(dataframe_filenames[0])['experiment'],
        'fit-fun': kn.unpack(dataframe_filenames[0])['fit-fun'],
        'viz': viz_slug,
        '_data_hathash_hash': fsh.FilesHash().hash_files(dataframe_filenames),
        '_script_fullcat_hash': fsh.FilesHash(
            file_parcel="full_parcel",
            files_join="cat_join"
        ).hash_files([sys.argv[0]]),
        'ext': '.pdf'
    })
    plt.savefig(
        outfile,
        transparent=True,
        bbox_inches='tight',
        pad_inches=0
    )
    print("output saved to", outfile)
| 27.429519
| 80
| 0.597098
| 1,971
| 16,540
| 4.820903
| 0.089802
| 0.039781
| 0.053673
| 0.035782
| 0.907072
| 0.887392
| 0.881499
| 0.876868
| 0.856557
| 0.844875
| 0
| 0.009844
| 0.232285
| 16,540
| 602
| 81
| 27.475083
| 0.738463
| 0.041778
| 0
| 0.799242
| 0
| 0
| 0.215458
| 0.012882
| 0
| 0
| 0
| 0
| 0.037879
| 1
| 0
| false
| 0
| 0.022727
| 0
| 0.022727
| 0.022727
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3cbd9ecda89d6967d7791261886b96f7ac070d13
| 18,826
|
py
|
Python
|
tests/sfp_test.py
|
sg893052/sonic-utilities
|
fdb79b8d65b8ca22232f4e6b140f593dd01613d5
|
[
"Apache-2.0"
] | 91
|
2016-03-23T14:24:41.000Z
|
2022-03-18T20:25:37.000Z
|
tests/sfp_test.py
|
sg893052/sonic-utilities
|
fdb79b8d65b8ca22232f4e6b140f593dd01613d5
|
[
"Apache-2.0"
] | 1,495
|
2017-02-15T10:49:10.000Z
|
2022-03-31T18:49:56.000Z
|
tests/sfp_test.py
|
sg893052/sonic-utilities
|
fdb79b8d65b8ca22232f4e6b140f593dd01613d5
|
[
"Apache-2.0"
] | 466
|
2016-04-25T09:31:23.000Z
|
2022-03-31T06:54:17.000Z
|
import sys
import os
from click.testing import CliRunner
# Registers mocked DB connectors as an import side effect.
from .mock_tables import dbconnector
# Put the repo root ahead of any installed copy of the package and
# remember where the CLI helper scripts live.
test_path = os.path.dirname(os.path.abspath(__file__))
modules_path = os.path.dirname(test_path)
scripts_path = os.path.join(modules_path, "scripts")
sys.path.insert(0, modules_path)
import show.main as show
test_sfp_eeprom_with_dom_output = """\
Ethernet0: SFP EEPROM detected
Application Advertisement: N/A
Connector: No separable connector
Encoding: 64B66B
Extended Identifier: Power Class 3(2.5W max), CDR present in Rx Tx
Extended RateSelect Compliance: QSFP+ Rate Select Version 1
Identifier: QSFP28 or later
Length Cable Assembly(m): 3
Nominal Bit Rate(100Mbs): 255
Specification compliance:
10/40G Ethernet Compliance Code: 40G Active Cable (XLPPI)
Vendor Date Code(YYYY-MM-DD Lot): 2017-01-13
Vendor Name: Mellanox
Vendor OUI: 00-02-c9
Vendor PN: MFA1A00-C003
Vendor Rev: AC
Vendor SN: MT1706FT02064
ChannelMonitorValues:
RX1Power: 0.3802dBm
RX2Power: -0.4871dBm
RX3Power: -0.0860dBm
RX4Power: 0.3830dBm
TX1Bias: 6.7500mA
TX2Bias: 6.7500mA
TX3Bias: 6.7500mA
TX4Bias: 6.7500mA
ChannelThresholdValues:
RxPowerHighAlarm : 3.4001dBm
RxPowerHighWarning: 2.4000dBm
RxPowerLowAlarm : -13.5067dBm
RxPowerLowWarning : -9.5001dBm
TxBiasHighAlarm : 10.0000mA
TxBiasHighWarning : 9.5000mA
TxBiasLowAlarm : 0.5000mA
TxBiasLowWarning : 1.0000mA
ModuleMonitorValues:
Temperature: 30.9258C
Vcc: 3.2824Volts
ModuleThresholdValues:
TempHighAlarm : 75.0000C
TempHighWarning: 70.0000C
TempLowAlarm : -5.0000C
TempLowWarning : 0.0000C
VccHighAlarm : 3.6300Volts
VccHighWarning : 3.4650Volts
VccLowAlarm : 2.9700Volts
VccLowWarning : 3.1349Volts
"""
test_qsfp_dd_eeprom_with_dom_output = """\
Ethernet8: SFP EEPROM detected
Application Advertisement: 400GAUI-8 C2M (Annex 120E) - Active Cable assembly with BER < 2.6x10^-4
IB EDR (Arch.Spec.Vol.2) - Active Cable assembly with BER < 5x10^-5
IB QDR (Arch.Spec.Vol.2) - Active Cable assembly with BER < 10^-12
Connector: No separable connector
Encoding: Not supported for CMIS cables
Extended Identifier: Power Class 1(10.0W Max)
Extended RateSelect Compliance: Not supported for CMIS cables
Identifier: QSFP-DD Double Density 8X Pluggable Transceiver
Length Cable Assembly(m): 10
Nominal Bit Rate(100Mbs): Not supported for CMIS cables
Specification compliance: Not supported for CMIS cables
Vendor Date Code(YYYY-MM-DD Lot): 2020-05-22
Vendor Name: INNOLIGHT
Vendor OUI: 44-7c-7f
Vendor PN: C-DQ8FNM010-N00
Vendor Rev: 2A
Vendor SN: INKAO2900002A
ChannelMonitorValues:
RX1Power: -3.8595dBm
RX2Power: 8.1478dBm
RX3Power: -22.9243dBm
RX4Power: 1.175dBm
RX5Power: 1.2421dBm
RX6Power: 8.1489dBm
RX7Power: -3.5962dBm
RX8Power: -3.6131dBm
TX1Bias: 17.4760mA
TX1Power: 1.175dBm
TX2Bias: 17.4760mA
TX2Power: 1.175dBm
TX3Bias: 0.0000mA
TX3Power: 1.175dBm
TX4Bias: 0.0000mA
TX4Power: 1.175dBm
TX5Bias: 0.0000mAmA
TX5Power: 1.175dBm
TX6Bias: 8.2240mAmA
TX6Power: 1.175dBm
TX7Bias: 8.2240mAmA
TX7Power: 1.175dBm
TX8Bias: 8.2240mAmA
TX8Power: 1.175dBm
ChannelThresholdValues:
RxPowerHighAlarm : 6.9999dBm
RxPowerHighWarning: 4.9999dBm
RxPowerLowAlarm : -11.9044dBm
RxPowerLowWarning : -8.9008dBm
TxBiasHighAlarm : 14.9960mA
TxBiasHighWarning : 12.9980mA
TxBiasLowAlarm : 4.4960mA
TxBiasLowWarning : 5.0000mA
TxPowerHighAlarm : 6.9999dBm
TxPowerHighWarning: 4.9999dBm
TxPowerLowAlarm : -10.5012dBm
TxPowerLowWarning : -7.5007dBm
ModuleMonitorValues:
Temperature: 44.9883C
Vcc: 3.2999Volts
ModuleThresholdValues:
TempHighAlarm : 80.0000C
TempHighWarning: 75.0000C
TempLowAlarm : -10.0000C
TempLowWarning : -5.0000C
VccHighAlarm : 3.6352Volts
VccHighWarning : 3.4672Volts
VccLowAlarm : 2.9696Volts
VccLowWarning : 3.1304Volts
"""
test_sfp_eeprom_output = """\
Ethernet0: SFP EEPROM detected
Application Advertisement: N/A
Connector: No separable connector
Encoding: 64B66B
Extended Identifier: Power Class 3(2.5W max), CDR present in Rx Tx
Extended RateSelect Compliance: QSFP+ Rate Select Version 1
Identifier: QSFP28 or later
Length Cable Assembly(m): 3
Nominal Bit Rate(100Mbs): 255
Specification compliance:
10/40G Ethernet Compliance Code: 40G Active Cable (XLPPI)
Vendor Date Code(YYYY-MM-DD Lot): 2017-01-13
Vendor Name: Mellanox
Vendor OUI: 00-02-c9
Vendor PN: MFA1A00-C003
Vendor Rev: AC
Vendor SN: MT1706FT02064
"""
test_qsfp_dd_eeprom_output = """\
Ethernet8: SFP EEPROM detected
Application Advertisement: 400GAUI-8 C2M (Annex 120E) - Active Cable assembly with BER < 2.6x10^-4
IB EDR (Arch.Spec.Vol.2) - Active Cable assembly with BER < 5x10^-5
IB QDR (Arch.Spec.Vol.2) - Active Cable assembly with BER < 10^-12
Connector: No separable connector
Encoding: Not supported for CMIS cables
Extended Identifier: Power Class 1(10.0W Max)
Extended RateSelect Compliance: Not supported for CMIS cables
Identifier: QSFP-DD Double Density 8X Pluggable Transceiver
Length Cable Assembly(m): 10
Nominal Bit Rate(100Mbs): Not supported for CMIS cables
Specification compliance: Not supported for CMIS cables
Vendor Date Code(YYYY-MM-DD Lot): 2020-05-22
Vendor Name: INNOLIGHT
Vendor OUI: 44-7c-7f
Vendor PN: C-DQ8FNM010-N00
Vendor Rev: 2A
Vendor SN: INKAO2900002A
"""
test_sfp_eeprom_dom_all_output = """\
Ethernet0: SFP EEPROM detected
Application Advertisement: N/A
Connector: No separable connector
Encoding: 64B66B
Extended Identifier: Power Class 3(2.5W max), CDR present in Rx Tx
Extended RateSelect Compliance: QSFP+ Rate Select Version 1
Identifier: QSFP28 or later
Length Cable Assembly(m): 3
Nominal Bit Rate(100Mbs): 255
Specification compliance:
10/40G Ethernet Compliance Code: 40G Active Cable (XLPPI)
Vendor Date Code(YYYY-MM-DD Lot): 2017-01-13
Vendor Name: Mellanox
Vendor OUI: 00-02-c9
Vendor PN: MFA1A00-C003
Vendor Rev: AC
Vendor SN: MT1706FT02064
ChannelMonitorValues:
RX1Power: 0.3802dBm
RX2Power: -0.4871dBm
RX3Power: -0.0860dBm
RX4Power: 0.3830dBm
TX1Bias: 6.7500mA
TX2Bias: 6.7500mA
TX3Bias: 6.7500mA
TX4Bias: 6.7500mA
ChannelThresholdValues:
RxPowerHighAlarm : 3.4001dBm
RxPowerHighWarning: 2.4000dBm
RxPowerLowAlarm : -13.5067dBm
RxPowerLowWarning : -9.5001dBm
TxBiasHighAlarm : 10.0000mA
TxBiasHighWarning : 9.5000mA
TxBiasLowAlarm : 0.5000mA
TxBiasLowWarning : 1.0000mA
ModuleMonitorValues:
Temperature: 30.9258C
Vcc: 3.2824Volts
ModuleThresholdValues:
TempHighAlarm : 75.0000C
TempHighWarning: 70.0000C
TempLowAlarm : -5.0000C
TempLowWarning : 0.0000C
VccHighAlarm : 3.6300Volts
VccHighWarning : 3.4650Volts
VccLowAlarm : 2.9700Volts
VccLowWarning : 3.1349Volts
Ethernet4: SFP EEPROM Not detected
Ethernet64: SFP EEPROM detected
Application Advertisement: N/A
Connector: No separable connector
Encoding: 64B66B
Extended Identifier: Power Class 3(2.5W max), CDR present in Rx Tx
Extended RateSelect Compliance: QSFP+ Rate Select Version 1
Identifier: QSFP28 or later
Length Cable Assembly(m): 3
Nominal Bit Rate(100Mbs): 255
Specification compliance:
10/40G Ethernet Compliance Code: 40G Active Cable (XLPPI)
Vendor Date Code(YYYY-MM-DD Lot): 2017-01-13
Vendor Name: Mellanox
Vendor OUI: 00-02-c9
Vendor PN: MFA1A00-C003
Vendor Rev: AC
Vendor SN: MT1706FT02064
ChannelMonitorValues:
RX1Power: 0.3802dBm
RX2Power: -0.4871dBm
RX3Power: -0.0860dBm
RX4Power: 0.3830dBm
TX1Bias: 6.7500mA
TX2Bias: 6.7500mA
TX3Bias: 6.7500mA
TX4Bias: 6.7500mA
ChannelThresholdValues:
RxPowerHighAlarm : 3.4001dBm
RxPowerHighWarning: 2.4000dBm
RxPowerLowAlarm : -13.5067dBm
RxPowerLowWarning : -9.5001dBm
TxBiasHighAlarm : 10.0000mA
TxBiasHighWarning : 9.5000mA
TxBiasLowAlarm : 0.5000mA
TxBiasLowWarning : 1.0000mA
ModuleMonitorValues:
Temperature: 30.9258C
Vcc: 3.2824Volts
ModuleThresholdValues:
TempHighAlarm : 75.0000C
TempHighWarning: 70.0000C
TempLowAlarm : -5.0000C
TempLowWarning : 0.0000C
VccHighAlarm : 3.6300Volts
VccHighWarning : 3.4650Volts
VccLowAlarm : 2.9700Volts
VccLowWarning : 3.1349Volts
"""
test_sfp_eeprom_all_output = """\
Ethernet0: SFP EEPROM detected
Application Advertisement: N/A
Connector: No separable connector
Encoding: 64B66B
Extended Identifier: Power Class 3(2.5W max), CDR present in Rx Tx
Extended RateSelect Compliance: QSFP+ Rate Select Version 1
Identifier: QSFP28 or later
Length Cable Assembly(m): 3
Nominal Bit Rate(100Mbs): 255
Specification compliance:
10/40G Ethernet Compliance Code: 40G Active Cable (XLPPI)
Vendor Date Code(YYYY-MM-DD Lot): 2017-01-13
Vendor Name: Mellanox
Vendor OUI: 00-02-c9
Vendor PN: MFA1A00-C003
Vendor Rev: AC
Vendor SN: MT1706FT02064
Ethernet4: SFP EEPROM Not detected
Ethernet64: SFP EEPROM detected
Application Advertisement: N/A
Connector: No separable connector
Encoding: 64B66B
Extended Identifier: Power Class 3(2.5W max), CDR present in Rx Tx
Extended RateSelect Compliance: QSFP+ Rate Select Version 1
Identifier: QSFP28 or later
Length Cable Assembly(m): 3
Nominal Bit Rate(100Mbs): 255
Specification compliance:
10/40G Ethernet Compliance Code: 40G Active Cable (XLPPI)
Vendor Date Code(YYYY-MM-DD Lot): 2017-01-13
Vendor Name: Mellanox
Vendor OUI: 00-02-c9
Vendor PN: MFA1A00-C003
Vendor Rev: AC
Vendor SN: MT1706FT02064
"""
test_sfp_presence_all_output = """\
Port Presence
---------- -----------
Ethernet0 Present
Ethernet4 Not present
Ethernet64 Present
"""
class TestSFP(object):
    """Tests for 'show interfaces transceiver' subcommands on a single-ASIC device."""

    @classmethod
    def setup_class(cls):
        """Expose the utility scripts on PATH and switch the CLI into unit-test mode."""
        print("SETUP")
        os.environ["PATH"] += os.pathsep + scripts_path
        os.environ["UTILITIES_UNIT_TESTING"] = "2"

    def test_sfp_presence(self):
        """'presence <port>' reports Present for a populated port, Not present otherwise."""
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["presence"], ["Ethernet0"])
        expected = """Port Presence
--------- ----------
Ethernet0 Present
"""
        assert result.exit_code == 0
        assert result.output == expected

        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["presence"], ["Ethernet200"])
        expected = """Port Presence
----------- -----------
Ethernet200 Not present
"""
        assert result.exit_code == 0
        assert result.output == expected

    def test_sfp_eeprom_with_dom(self):
        """'eeprom -d' for a QSFP port includes DOM monitor data."""
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["eeprom"], ["Ethernet0 -d"])
        assert result.exit_code == 0
        assert "\n".join([l.rstrip() for l in result.output.split('\n')]) == test_sfp_eeprom_with_dom_output

    def test_qsfp_dd_eeprom_with_dom(self):
        """'eeprom -d' for a QSFP-DD port includes DOM monitor data."""
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["eeprom"], ["Ethernet8 -d"])
        assert result.exit_code == 0
        # BUGFIX: the original line asserted a non-empty string literal
        # ('assert "result.output == ..."'), which is always true and
        # verified nothing.  Compare the actual command output instead.
        assert "\n".join([l.rstrip() for l in result.output.split('\n')]) == test_qsfp_dd_eeprom_with_dom_output

    def test_sfp_eeprom(self):
        """'eeprom <port>' dumps EEPROM contents; absent ports report Not detected."""
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["eeprom"], ["Ethernet0"])
        assert result.exit_code == 0
        assert "\n".join([l.rstrip() for l in result.output.split('\n')]) == test_sfp_eeprom_output

        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["eeprom"], ["Ethernet200"])
        result_lines = result.output.strip('\n')
        expected = "Ethernet200: SFP EEPROM Not detected"
        assert result_lines == expected

    def test_qsfp_dd_eeprom(self):
        """'eeprom <port>' for a QSFP-DD port dumps its EEPROM contents."""
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["eeprom"], ["Ethernet8"])
        assert result.exit_code == 0
        # BUGFIX: same always-true string-literal assert as above; compare
        # the actual command output instead.
        assert "\n".join([l.rstrip() for l in result.output.split('\n')]) == test_qsfp_dd_eeprom_output

    @classmethod
    def teardown_class(cls):
        """Undo setup_class: restore PATH and disable unit-test mode."""
        print("TEARDOWN")
        os.environ["PATH"] = os.pathsep.join(os.environ["PATH"].split(os.pathsep)[:-1])
        os.environ["UTILITIES_UNIT_TESTING"] = "0"
        os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = ""
class Test_multiAsic_SFP(object):
    """Tests for 'show interfaces transceiver' subcommands on a multi-ASIC topology."""

    @classmethod
    def setup_class(cls):
        """Enable unit-test mode with the multi-ASIC topology."""
        print("SETUP")
        os.environ["PATH"] += os.pathsep + scripts_path
        os.environ["UTILITIES_UNIT_TESTING"] = "2"
        os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "multi_asic"

    def test_sfp_presence_with_ns(self):
        """'presence <port> -n <ns>' works with an explicit namespace."""
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["presence"], ["Ethernet0 -n asic0"])
        expected = """Port Presence
--------- ----------
Ethernet0 Present
"""
        assert result.exit_code == 0
        assert result.output == expected

        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["presence"], ["Ethernet200 -n asic0"])
        expected = """Port Presence
----------- -----------
Ethernet200 Not present
"""
        assert result.exit_code == 0
        assert result.output == expected

    def test_sfp_presence_all(self):
        """'presence' with no port lists every port across all ASICs."""
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["presence"])
        assert result.exit_code == 0
        assert "\n".join([l.rstrip() for l in result.output.split('\n')]) == test_sfp_presence_all_output

    def test_sfp_eeprom_with_dom_with_ns(self):
        """'eeprom -d' with an explicit namespace includes DOM data."""
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["eeprom"], ["Ethernet0 -d -n asic0"])
        assert result.exit_code == 0
        assert "\n".join([l.rstrip() for l in result.output.split('\n')]) == test_sfp_eeprom_with_dom_output

    def test_sfp_eeprom_with_ns(self):
        """'eeprom <port> -n <ns>'; absent ports report Not detected.

        BUGFIX: the original class defined this method twice with identical
        bodies; the second definition silently shadowed the first, so one
        copy has been removed.
        """
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["eeprom"], ["Ethernet0 -n asic0"])
        assert result.exit_code == 0
        assert "\n".join([l.rstrip() for l in result.output.split('\n')]) == test_sfp_eeprom_output

        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["eeprom"], ["Ethernet200 -n asic0"])
        result_lines = result.output.strip('\n')
        expected = "Ethernet200: SFP EEPROM Not detected"
        assert result_lines == expected

    def test_sfp_eeprom_all(self):
        """'eeprom' with no port dumps every port across all ASICs."""
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["eeprom"])
        assert result.exit_code == 0
        assert "\n".join([l.rstrip() for l in result.output.split('\n')]) == test_sfp_eeprom_all_output

    def test_sfp_eeprom_dom_all(self):
        """'eeprom -d' with no port dumps DOM data for every port."""
        runner = CliRunner()
        result = runner.invoke(show.cli.commands["interfaces"].commands["transceiver"].commands["eeprom"], ["-d"])
        assert result.exit_code == 0
        assert "\n".join([l.rstrip() for l in result.output.split('\n')]) == test_sfp_eeprom_dom_all_output

    @classmethod
    def teardown_class(cls):
        """Undo setup_class: restore PATH, disable unit-test mode and topology."""
        print("TEARDOWN")
        os.environ["PATH"] = os.pathsep.join(os.environ["PATH"].split(os.pathsep)[:-1])
        os.environ["UTILITIES_UNIT_TESTING"] = "0"
        os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = ""
| 40.486022
| 134
| 0.606024
| 2,033
| 18,826
| 5.526316
| 0.139203
| 0.024833
| 0.020828
| 0.033289
| 0.88073
| 0.874499
| 0.86186
| 0.86008
| 0.858033
| 0.858033
| 0
| 0.090503
| 0.299214
| 18,826
| 464
| 135
| 40.573276
| 0.761085
| 0
| 0
| 0.772834
| 0
| 0.014052
| 0.703548
| 0.026878
| 0
| 0
| 0
| 0
| 0.0726
| 1
| 0.037471
| false
| 0
| 0.01171
| 0
| 0.053864
| 0.009368
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3cd6faac0d728cb873b71e576905e49524901bf5
| 22,952
|
py
|
Python
|
tests/plateform.py
|
gudhati/apptweak-api-python-library
|
f4a7f7e34548d6d216f3a297d63944c7adbf9667
|
[
"MIT"
] | 5
|
2019-05-21T14:44:57.000Z
|
2020-10-30T04:08:13.000Z
|
tests/plateform.py
|
gudhati/apptweak-api-python-library
|
f4a7f7e34548d6d216f3a297d63944c7adbf9667
|
[
"MIT"
] | 1
|
2020-08-28T02:42:37.000Z
|
2020-08-28T07:52:54.000Z
|
tests/plateform.py
|
gudhati/apptweak-api-python-library
|
f4a7f7e34548d6d216f3a297d63944c7adbf9667
|
[
"MIT"
] | 5
|
2019-07-18T13:38:01.000Z
|
2021-06-09T04:12:35.000Z
|
from apptweak import apptweak
import unittest
from unittest.mock import patch
@patch("apptweak.Ressource.http_request",return_value=True)
@patch("json.loads",return_value=True)
class TestPlateform(unittest.TestCase):
    """Argument-validation tests for the apptweak Ios/Android API wrappers.

    The HTTP layer and JSON decoding are patched to return True, so a
    "successful" (truthy) call only proves the arguments passed the
    client-side parameter checks; invalid arguments must raise.
    """

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_metadata(self, mock_load, mock_res):
        """metadata: app id required; options, when given, must be a dict."""
        # IOS
        self.assertTrue(apptweak.Ios.metadata(123,{'a':'b'}))
        self.assertTrue(apptweak.Ios.metadata(123))
        with self.assertRaises(Exception):
            apptweak.Ios.metadata({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.metadata()
        with self.assertRaises(Exception):
            apptweak.Ios.metadata(123,'string')
        # ANDROID
        self.assertTrue(apptweak.Android.metadata("com.company",{'a':'b'}))
        self.assertTrue(apptweak.Android.metadata("com.company"))
        with self.assertRaises(Exception):
            apptweak.Android.metadata({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.metadata()
        with self.assertRaises(Exception):
            apptweak.Android.metadata("com.company",'string')

    def test_ratings(self, mock_load, mock_res):
        """ratings: app id required; options, when given, must be a dict."""
        # IOS
        self.assertTrue(apptweak.Ios.ratings(123,{'a':'b'}))
        self.assertTrue(apptweak.Ios.ratings(123))
        with self.assertRaises(Exception):
            apptweak.Ios.ratings({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.ratings()
        with self.assertRaises(Exception):
            apptweak.Ios.ratings(123,'string')
        # ANDROID
        self.assertTrue(apptweak.Android.ratings("com.company",{'a':'b'}))
        self.assertTrue(apptweak.Android.ratings("com.company"))
        with self.assertRaises(Exception):
            apptweak.Android.ratings({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.ratings()
        with self.assertRaises(Exception):
            apptweak.Android.ratings("com.company",'string')

    def test_rankings(self, mock_load, mock_res):
        """rankings: Android additionally requires a 'country' option."""
        # IOS
        self.assertTrue(apptweak.Ios.rankings(123,{'a':'b'}))
        self.assertTrue(apptweak.Ios.rankings(123))
        with self.assertRaises(Exception):
            apptweak.Ios.rankings({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.rankings()
        with self.assertRaises(Exception):
            apptweak.Ios.rankings(123,'string')
        # ANDROID
        self.assertTrue(apptweak.Android.rankings("com.company",{'country':'us'}))
        with self.assertRaises(Exception):
            apptweak.Android.rankings("com.company",{'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.rankings("com.company")
        with self.assertRaises(Exception):
            apptweak.Android.rankings({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.rankings()
        with self.assertRaises(Exception):
            apptweak.Android.rankings("com.company",'string')

    def test_power(self, mock_load, mock_res):
        """power: requires a 'country' option on both platforms."""
        # IOS
        app_id = 123
        self.assertTrue(apptweak.Ios.power(app_id,{'country':'b'}))
        with self.assertRaises(Exception):
            apptweak.Ios.power(app_id)
        with self.assertRaises(Exception):
            apptweak.Ios.power(app_id,{'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Ios.power({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.power()
        with self.assertRaises(Exception):
            apptweak.Ios.power(app_id,'string')
        # ANDROID
        app_id = "com.company"
        self.assertTrue(apptweak.Android.power(app_id,{'country':'b'}))
        with self.assertRaises(Exception):
            apptweak.Android.power(app_id)
        with self.assertRaises(Exception):
            apptweak.Android.power(app_id,{'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.power({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.power()
        with self.assertRaises(Exception):
            apptweak.Android.power(app_id,'string')

    def test_backlinks(self, mock_load, mock_res):
        """backlinks: unsupported on iOS (always raises); Android validates args."""
        # IOS
        app_id = 123
        with self.assertRaises(Exception):
            apptweak.Ios.backlinks(app_id)
        with self.assertRaises(Exception):
            apptweak.Ios.backlinks(app_id,{'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Ios.backlinks({'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Ios.backlinks()
        with self.assertRaises(Exception):
            apptweak.Ios.backlinks(app_id,'string')
        # ANDROID
        app_id = "com.company"
        self.assertTrue(apptweak.Android.backlinks(app_id))
        # BUGFIX: the options dict was previously passed to assertTrue as
        # its msg argument -- `assertTrue(backlinks(app_id), {'a':'b'})` --
        # instead of to backlinks itself.
        self.assertTrue(apptweak.Android.backlinks(app_id,{'a':'b'}))
        with self.assertRaises(Exception):
            apptweak.Android.backlinks({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.backlinks()
        with self.assertRaises(TypeError):
            apptweak.Android.backlinks(app_id,'string')

    def test_downloads(self, mock_load, mock_res):
        """downloads: app id required; options, when given, must be a dict."""
        # IOS
        app_id = 123
        self.assertTrue(apptweak.Ios.downloads(app_id,{'a':'b'}))
        self.assertTrue(apptweak.Ios.downloads(app_id))
        with self.assertRaises(Exception):
            apptweak.Ios.downloads({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.downloads()
        with self.assertRaises(Exception):
            apptweak.Ios.downloads(app_id,'string')
        # ANDROID
        app_id = "com.company"
        self.assertTrue(apptweak.Android.downloads(app_id,{'a':'b'}))
        self.assertTrue(apptweak.Android.downloads(app_id))
        with self.assertRaises(Exception):
            apptweak.Android.downloads({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.downloads()
        with self.assertRaises(Exception):
            apptweak.Android.downloads(app_id,'string')

    def test_revenues(self, mock_load, mock_res):
        """revenues: supported on iOS; unsupported on Android (always raises)."""
        # IOS
        app_id = 123
        self.assertTrue(apptweak.Ios.revenues(app_id,{'a':'b'}))
        self.assertTrue(apptweak.Ios.revenues(app_id))
        with self.assertRaises(Exception):
            apptweak.Ios.revenues({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.revenues()
        with self.assertRaises(Exception):
            apptweak.Ios.revenues(app_id,'string')
        # ANDROID
        app_id = "com.company"
        with self.assertRaises(Exception):
            apptweak.Android.revenues(app_id,{'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.revenues(app_id)
        with self.assertRaises(Exception):
            apptweak.Android.revenues({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.revenues()
        with self.assertRaises(Exception):
            apptweak.Android.revenues(app_id,'string')

    def test_reviews(self, mock_load, mock_res):
        """reviews: supported on iOS; unsupported on Android (always raises)."""
        # IOS
        app_id = 123
        self.assertTrue(apptweak.Ios.reviews(app_id,{'a':'b'}))
        self.assertTrue(apptweak.Ios.reviews(app_id))
        with self.assertRaises(Exception):
            apptweak.Ios.reviews({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.reviews()
        with self.assertRaises(Exception):
            apptweak.Ios.reviews(app_id,'string')
        # ANDROID
        app_id = "com.company"
        with self.assertRaises(Exception):
            apptweak.Android.reviews(app_id,{'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.reviews(app_id)
        with self.assertRaises(Exception):
            apptweak.Android.reviews({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.reviews()
        with self.assertRaises(Exception):
            apptweak.Android.reviews(app_id,'string')

    def test_searches(self, mock_load, mock_res):
        """searches: takes a params dict containing a 'term', not an app id."""
        # IOS
        app_id = 123
        self.assertTrue(apptweak.Ios.searches({'term':'b'}))
        with self.assertRaises(Exception):
            apptweak.Ios.searches(app_id)
        with self.assertRaises(Exception):
            apptweak.Ios.searches("string")
        with self.assertRaises(TypeError):
            apptweak.Ios.searches()
        with self.assertRaises(TypeError):
            apptweak.Ios.searches(app_id,'string')
        # ANDROID
        app_id = "com.company"
        self.assertTrue(apptweak.Android.searches({'term':'b'}))
        with self.assertRaises(Exception):
            apptweak.Android.searches(app_id)
        with self.assertRaises(Exception):
            apptweak.Android.searches({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.searches()
        with self.assertRaises(TypeError):
            apptweak.Android.searches(app_id,'string')

    def test_top_charts(self, mock_load, mock_res):
        """top_charts: category id required; options, when given, must be a dict."""
        # IOS
        cat_id = 123
        self.assertTrue(apptweak.Ios.top_charts(cat_id,{'a':'b'}))
        self.assertTrue(apptweak.Ios.top_charts(cat_id))
        with self.assertRaises(Exception):
            apptweak.Ios.top_charts({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.top_charts()
        with self.assertRaises(Exception):
            apptweak.Ios.top_charts(cat_id,'string')
        # ANDROID
        cat_id = "GAME"
        self.assertTrue(apptweak.Android.top_charts(cat_id,{'a':'b'}))
        self.assertTrue(apptweak.Android.top_charts(cat_id))
        with self.assertRaises(Exception):
            apptweak.Android.top_charts({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.top_charts()
        with self.assertRaises(Exception):
            apptweak.Android.top_charts(cat_id,'string')

    def test_top_displayed_reviews(self, mock_load, mock_res):
        """top_displayed_reviews: needs app id plus a valid sort keyword."""
        # IOS
        app_id = 123
        self.assertTrue(apptweak.Ios.top_displayed_reviews(app_id,'most_useful',{'a':'b'}))
        self.assertTrue(apptweak.Ios.top_displayed_reviews(app_id,'most_useful'))
        with self.assertRaises(Exception):
            apptweak.Ios.top_displayed_reviews(app_id,'most_us')
        with self.assertRaises(TypeError):
            apptweak.Ios.top_displayed_reviews(app_id)
        with self.assertRaises(TypeError):
            apptweak.Ios.top_displayed_reviews()
        with self.assertRaises(Exception):
            apptweak.Ios.top_displayed_reviews(app_id,{'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Ios.top_displayed_reviews(app_id,'most_useful','string')
        # ANDROID
        app_id = "com.company"
        self.assertTrue(apptweak.Android.top_displayed_reviews(app_id,'most_useful',{'language':'us'}))
        with self.assertRaises(TypeError):
            apptweak.Android.top_displayed_reviews(app_id,'most_useful')
        with self.assertRaises(Exception):
            apptweak.Android.top_displayed_reviews(app_id,'most_us',{'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.top_displayed_reviews(app_id,'most_us')
        with self.assertRaises(TypeError):
            apptweak.Android.top_displayed_reviews(app_id)
        with self.assertRaises(TypeError):
            apptweak.Android.top_displayed_reviews()
        with self.assertRaises(Exception):
            apptweak.Android.top_displayed_reviews(app_id,{'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.top_displayed_reviews(app_id,'most_useful','string')

    def test_filtered_reviews(self, mock_load, mock_res):
        """filtered_reviews: needs a 'term' option (plus 'language' on Android)."""
        # IOS
        app_id = 123
        self.assertTrue(apptweak.Ios.filtered_reviews(app_id,{'term':'b'}))
        with self.assertRaises(Exception):
            apptweak.Ios.filtered_reviews({'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Ios.filtered_reviews(app_id,{'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.filtered_reviews()
        with self.assertRaises(Exception):
            apptweak.Ios.filtered_reviews(app_id,'string')
        # ANDROID
        app_id = "com.company"
        self.assertTrue(apptweak.Android.filtered_reviews(app_id,{'language':'en','term':'b'}))
        with self.assertRaises(Exception):
            apptweak.Android.filtered_reviews(app_id,{'term':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.filtered_reviews({'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.filtered_reviews(app_id,{'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.filtered_reviews()
        with self.assertRaises(Exception):
            apptweak.Android.filtered_reviews(app_id,'string')

    def test_review_stats(self, mock_load, mock_res):
        """review_stats: app id required; options, when given, must be a dict."""
        # IOS
        app_id = 123
        self.assertTrue(apptweak.Ios.review_stats(app_id))
        self.assertTrue(apptweak.Ios.review_stats(app_id,{'a':'b'}))
        with self.assertRaises(Exception):
            apptweak.Ios.review_stats({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.review_stats()
        with self.assertRaises(Exception):
            apptweak.Ios.review_stats(app_id,'string')
        # ANDROID
        app_id = "com.company"
        self.assertTrue(apptweak.Android.review_stats(app_id))
        self.assertTrue(apptweak.Android.review_stats(app_id,{'a':'b'}))
        with self.assertRaises(Exception):
            apptweak.Android.review_stats({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.review_stats()
        with self.assertRaises(Exception):
            apptweak.Android.review_stats(app_id,'string')

    def test_app_top_keywords(self, mock_load, mock_res):
        """app_top_keywords: app id required; options, when given, must be a dict."""
        # IOS
        app_id = 123
        self.assertTrue(apptweak.Ios.app_top_keywords(app_id))
        self.assertTrue(apptweak.Ios.app_top_keywords(app_id,{'a':'b'}))
        with self.assertRaises(Exception):
            apptweak.Ios.app_top_keywords({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.app_top_keywords()
        with self.assertRaises(Exception):
            apptweak.Ios.app_top_keywords(app_id,'string')
        # ANDROID
        app_id = "com.company"
        self.assertTrue(apptweak.Android.app_top_keywords(app_id))
        self.assertTrue(apptweak.Android.app_top_keywords(app_id,{'a':'b'}))
        with self.assertRaises(Exception):
            apptweak.Android.app_top_keywords({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.app_top_keywords()
        with self.assertRaises(Exception):
            apptweak.Android.app_top_keywords(app_id,'string')

    def test_keywords_stats(self, mock_load, mock_res):
        """keywords_stats: needs a 'keywords' list with a bounded length."""
        # IOS
        self.assertTrue(apptweak.Ios.keywords_stats({'keywords':['a']}))
        self.assertTrue(apptweak.Ios.keywords_stats({'keywords':['a','b']}))
        with self.assertRaises(TypeError):
            self.assertTrue(apptweak.Ios.keywords_stats())
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_stats({'keyword':['a','b']}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_stats({'keywords':['a','b','a','b','a','b','a','b','9','10','11']}))
        # ANDROID
        self.assertTrue(apptweak.Android.keywords_stats({'keywords':['a']}))
        self.assertTrue(apptweak.Android.keywords_stats({'keywords':['a','b']}))
        with self.assertRaises(TypeError):
            self.assertTrue(apptweak.Android.keywords_stats())
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Android.keywords_stats({'keyword':['a','b']}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Android.keywords_stats({'keywords':['a','b','a','b','a','b','a','b','9','10','11']}))

    def test_keywords_competitors(self, mock_load, mock_res):
        """keywords_competitors: app id required; options must be a dict."""
        # IOS
        app_id = 123
        self.assertTrue(apptweak.Ios.keywords_competitors(app_id))
        self.assertTrue(apptweak.Ios.keywords_competitors(app_id,{'a':'b'}))
        with self.assertRaises(Exception):
            apptweak.Ios.keywords_competitors({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.keywords_competitors()
        with self.assertRaises(Exception):
            apptweak.Ios.keywords_competitors(app_id,'string')
        # ANDROID
        app_id = "com.company"
        self.assertTrue(apptweak.Android.keywords_competitors(app_id))
        self.assertTrue(apptweak.Android.keywords_competitors(app_id,{'a':'b'}))
        with self.assertRaises(Exception):
            apptweak.Android.keywords_competitors({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.keywords_competitors()
        with self.assertRaises(Exception):
            apptweak.Android.keywords_competitors(app_id,'string')

    def test_keywords_ranking_competitors(self, mock_load, mock_res):
        """keywords_ranking_competitors: needs bounded 'keywords' and 'applications' lists."""
        # IOS
        self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'keywords':['a'],'applications':[12]}))
        self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'keywords':['a','b'],'applications':[12,13]}))
        with self.assertRaises(TypeError):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors())
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'keyword':['a','b']}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'keywords':['a','b','a','b','a','b','a','b','9','10','11']}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'applcations':[1,2]}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'applications':[5,4,3,2,1,6]}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'keywords':['a','b','a','b','a','b','a','b','9','10','11'],'applications':[1,2]}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'keyword':['a','b'],'applications':[5,4,3,2,1,6]}))
        # ANDROID
        self.assertTrue(apptweak.Android.keywords_ranking_competitors({'keywords':['a'],'applications':['12']}))
        self.assertTrue(apptweak.Android.keywords_ranking_competitors({'keywords':['a','b'],'applications':['12','13']}))
        with self.assertRaises(TypeError):
            self.assertTrue(apptweak.Android.keywords_ranking_competitors())
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Android.keywords_ranking_competitors({'keyword':['a','b']}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Android.keywords_ranking_competitors({'keywords':['a','b','a','b','a','b','a','b','9','10','11']}))
        # NOTE(review): the remaining checks call Ios rather than Android,
        # mirroring the iOS block above -- possibly another copy-paste slip
        # in the original; left as-is because Ios raising here is still valid.
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'applcations':['1','2']}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'applications':['a','b','a','b','a','b','a']}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'keywords':['a','b','a','b','a','b','a','b','9','10','11'],'applications':['a','b']}))
        with self.assertRaises(Exception):
            self.assertTrue(apptweak.Ios.keywords_ranking_competitors({'keyword':['a','b'],'applications':['5','4','3','2','1','6']}))

    def test_category_top_keywords(self, mock_load, mock_res):
        """category_top_keywords: category id required; options must be a dict."""
        # IOS
        cat_id = 123
        self.assertTrue(apptweak.Ios.category_top_keywords(cat_id))
        self.assertTrue(apptweak.Ios.category_top_keywords(cat_id,{'a':'b'}))
        with self.assertRaises(Exception):
            apptweak.Ios.category_top_keywords({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Ios.category_top_keywords()
        with self.assertRaises(Exception):
            apptweak.Ios.category_top_keywords(cat_id,'string')
        # ANDROID
        cat_id = "com.company"
        self.assertTrue(apptweak.Android.category_top_keywords(cat_id))
        self.assertTrue(apptweak.Android.category_top_keywords(cat_id,{'a':'b'}))
        with self.assertRaises(Exception):
            apptweak.Android.category_top_keywords({'a':'b'})
        with self.assertRaises(TypeError):
            apptweak.Android.category_top_keywords()
        with self.assertRaises(Exception):
            apptweak.Android.category_top_keywords(cat_id,'string')

    def test_keywords_trending(self, mock_load, mock_res):
        """keywords_trending: iOS-only; Android always raises."""
        # IOS
        self.assertTrue(apptweak.Ios.keywords_trending())
        self.assertTrue(apptweak.Ios.keywords_trending({'a':'b'}))
        with self.assertRaises(Exception):
            apptweak.Ios.keywords_trending('string')
        # ANDROID
        app_id = "com.company"
        with self.assertRaises(Exception):
            apptweak.Android.keywords_trending(app_id,{'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.keywords_trending(app_id)
        with self.assertRaises(Exception):
            apptweak.Android.keywords_trending({'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.keywords_trending()
        with self.assertRaises(Exception):
            apptweak.Android.keywords_trending(app_id,'string')

    def test_keywords_volume_history(self, mock_load, mock_res):
        """keywords_volume_history: needs a 'keywords' list; Android unsupported."""
        # IOS
        self.assertTrue(apptweak.Ios.keywords_volume_history({'keywords':['b']}))
        with self.assertRaises(Exception):
            apptweak.Ios.keywords_volume_history({'keywrds':['b']})
        with self.assertRaises(TypeError):
            apptweak.Ios.keywords_volume_history()
        with self.assertRaises(Exception):
            # BUGFIX: the original called keywords_trending here, a
            # copy-paste from test_keywords_trending; this test should
            # exercise keywords_volume_history.
            apptweak.Ios.keywords_volume_history('string')
        # ANDROID
        app_id = "com.company"
        with self.assertRaises(Exception):
            apptweak.Android.keywords_volume_history(app_id,{'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.keywords_volume_history(app_id)
        with self.assertRaises(Exception):
            apptweak.Android.keywords_volume_history({'a':'b'})
        with self.assertRaises(Exception):
            apptweak.Android.keywords_volume_history()
        with self.assertRaises(Exception):
            apptweak.Android.keywords_volume_history(app_id,'string')
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
| 45.539683
| 157
| 0.639509
| 2,549
| 22,952
| 5.602197
| 0.036485
| 0.085714
| 0.214286
| 0.223389
| 0.963305
| 0.947199
| 0.922479
| 0.830952
| 0.651471
| 0.49951
| 0
| 0.007361
| 0.218674
| 22,952
| 503
| 158
| 45.630219
| 0.788937
| 0.008714
| 0
| 0.431871
| 0
| 0
| 0.055741
| 0.001365
| 0
| 0
| 0
| 0
| 0.540416
| 1
| 0.055427
| false
| 0.009238
| 0.006928
| 0
| 0.064665
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5960fac13e62fc0f3c13d38970fb9a591652d2d8
| 6,834
|
py
|
Python
|
medea/tests/test_mapper.py
|
kevinbeaty/medea
|
c200be0e781299d0f9064f5cb44b90d5c01dcfb3
|
[
"MIT"
] | 3
|
2016-12-25T19:40:58.000Z
|
2021-02-22T08:46:55.000Z
|
medea/tests/test_mapper.py
|
kevinbeaty/medea
|
c200be0e781299d0f9064f5cb44b90d5c01dcfb3
|
[
"MIT"
] | null | null | null |
medea/tests/test_mapper.py
|
kevinbeaty/medea
|
c200be0e781299d0f9064f5cb44b90d5c01dcfb3
|
[
"MIT"
] | null | null | null |
"""
MedeaMapper test
"""
from . import Person
from .. import MedeaMapper
def test_object_args_to_json():
    """A MedeaMapper built from positional field names serializes exactly those fields."""
    person = Person('Bob', 'Hope', '123 Main', '123', '1903-05-29')

    # Mapper restricted to the two name fields.
    name_mapper = MedeaMapper('first_name', 'last_name')
    assert name_mapper.to_json(person) == {
        'first_name': 'Bob',
        'last_name': 'Hope'}

    # Mapper covering every Person attribute.
    full_mapper = MedeaMapper('first_name', 'last_name',
                              'address', 'phone_number', 'dob')
    assert full_mapper.to_json(person) == {
        'first_name': 'Bob',
        'last_name': 'Hope',
        'address': '123 Main',
        'phone_number': '123',
        'dob': '1903-05-29'}
def test_object_args_from_json():
    """from_json with positional-name mappers updates only the mapped attributes."""
    bob = Person('Bob', 'Hope', '123 Main', '123', '1903-05-29')
    for attr, value in [('first_name', 'Bob'), ('last_name', 'Hope'),
                        ('address', '123 Main'), ('phone_number', '123'),
                        ('dob', '1903-05-29')]:
        assert getattr(bob, attr) == value

    fred = Person('Fred', 'Rodgers', '234 Rock', '456', '1928-03-20')
    for attr, value in [('first_name', 'Fred'), ('last_name', 'Rodgers'),
                        ('address', '234 Rock'), ('phone_number', '456'),
                        ('dob', '1928-03-20')]:
        assert getattr(fred, attr) == value

    # JSON fixtures: name-only and full-attribute views of each person.
    bob_names = {'first_name': 'Bob', 'last_name': 'Hope'}
    bob_full = dict(bob_names, address='123 Main',
                    phone_number='123', dob='1903-05-29')
    fred_names = {'first_name': 'Fred', 'last_name': 'Rodgers'}
    fred_full = dict(fred_names, address='234 Rock',
                     phone_number='456', dob='1928-03-20')

    name_mapper = MedeaMapper('first_name', 'last_name')
    full_mapper = MedeaMapper('first_name', 'last_name',
                              'address', 'phone_number', 'dob')

    assert name_mapper.to_json(bob) == bob_names
    assert name_mapper.to_json(fred) == fred_names
    assert full_mapper.to_json(bob) == bob_full
    assert full_mapper.to_json(fred) == fred_full

    # Overwrite Bob's name fields with Fred's; other attributes survive.
    name_mapper.from_json(fred_names, bob)
    assert name_mapper.to_json(bob) == fred_names
    assert full_mapper.to_json(bob) != bob_full
    assert bob.first_name == 'Fred'
    assert bob.last_name == 'Rodgers'
    assert bob.address == '123 Main'
    assert bob.phone_number == '123'
    assert bob.dob == '1903-05-29'

    # Restore Bob's name.
    name_mapper.from_json(bob_names, bob)
    assert name_mapper.to_json(bob) == bob_names
    assert full_mapper.to_json(bob) == bob_full

    # Applying the full mapper with name-only JSON updates just the names.
    full_mapper.from_json(fred_names, bob)
    assert name_mapper.to_json(bob) == fred_names
    assert full_mapper.to_json(bob) != bob_full
    assert bob.first_name == 'Fred'
    assert bob.last_name == 'Rodgers'
    assert bob.address == '123 Main'
    assert bob.phone_number == '123'
    assert bob.dob == '1903-05-29'

    # Restore Bob's name again.
    name_mapper.from_json(bob_names, bob)
    assert name_mapper.to_json(bob) == bob_names
    assert full_mapper.to_json(bob) == bob_full

    # Map every Fred attribute onto Bob, then map Bob's own back.
    full_mapper.from_json(fred_full, bob)
    assert full_mapper.to_json(bob) == fred_full
    full_mapper.from_json(bob_full, bob)
    assert full_mapper.to_json(bob) == bob_full
def test_object_kwargs_to_json():
    """Keyword arguments to MedeaMapper rename attributes in the JSON output."""
    person = Person('Bob', 'Hope', '123 Main', '123', '1903-05-29')

    # Both fields renamed via keywords.
    renamed = MedeaMapper(first_name='firstName', last_name='lastName')
    assert renamed.to_json(person) == {
        'firstName': 'Bob',
        'lastName': 'Hope'}

    # Positional names pass through unchanged; keyword names are remapped.
    mixed = MedeaMapper('address', 'dob',
                        first_name='firstName', last_name='lastName')
    assert mixed.to_json(person) == {
        'firstName': 'Bob',
        'lastName': 'Hope',
        'address': '123 Main',
        'dob': '1903-05-29'}
def test_object_kwargs_from_json():
    """from_json with keyword-renamed fields updates only the mapped attributes."""

    def check_attrs(person, first, last, address, phone, dob):
        # Local helper: verify every Person attribute in one place.
        assert person.first_name == first
        assert person.last_name == last
        assert person.address == address
        assert person.phone_number == phone
        assert person.dob == dob

    bob = Person('Bob', 'Hope', '123 Main', '123', '1903-05-29')
    check_attrs(bob, 'Bob', 'Hope', '123 Main', '123', '1903-05-29')
    fred = Person('Fred', 'Rodgers', '234 Rock', '456', '1928-03-20')
    check_attrs(fred, 'Fred', 'Rodgers', '234 Rock', '456', '1928-03-20')

    # Expected JSON payloads, name-only and full.
    bob_json = {
        'firstName': 'Bob',
        'lastName': 'Hope'}
    bob_json_full = {
        'firstName': 'Bob',
        'lastName': 'Hope',
        'address': '123 Main',
        'phoneNumber': '123',
        'DOB': '1903-05-29'}
    fred_json = {
        'firstName': 'Fred',
        'lastName': 'Rodgers'}
    fred_json_full = {
        'firstName': 'Fred',
        'lastName': 'Rodgers',
        'address': '234 Rock',
        'phoneNumber': '456',
        'DOB': '1928-03-20'}

    # Name-only mapper (keyword renames) and full mapper
    # (one positional field plus keyword renames).
    mapper = MedeaMapper(first_name='firstName', last_name='lastName')
    mapper_full = MedeaMapper('address',
                              first_name='firstName', last_name='lastName',
                              phone_number='phoneNumber', dob='DOB')
    assert mapper.to_json(bob) == bob_json
    assert mapper.to_json(fred) == fred_json
    assert mapper_full.to_json(bob) == bob_json_full
    assert mapper_full.to_json(fred) == fred_json_full

    # Override Bob's name from Fred: the name-only mapper leaves
    # every other attribute untouched.
    mapper.from_json(fred_json, bob)
    assert mapper.to_json(bob) == fred_json
    assert mapper_full.to_json(bob) != bob_json_full
    check_attrs(bob, 'Fred', 'Rodgers', '123 Main', '123', '1903-05-29')

    # Revert back to Bob's name.
    mapper.from_json(bob_json, bob)
    assert mapper.to_json(bob) == bob_json
    assert mapper_full.to_json(bob) == bob_json_full

    # A name-only payload through the full mapper still updates only names.
    mapper_full.from_json(fred_json, bob)
    assert mapper.to_json(bob) == fred_json
    assert mapper_full.to_json(bob) != bob_json_full
    check_attrs(bob, 'Fred', 'Rodgers', '123 Main', '123', '1903-05-29')

    # Revert back to Bob's name.
    mapper.from_json(bob_json, bob)
    assert mapper.to_json(bob) == bob_json
    assert mapper_full.to_json(bob) == bob_json_full

    # Map Fred fully onto Bob, then map Bob back onto himself.
    mapper_full.from_json(fred_json_full, bob)
    assert mapper_full.to_json(bob) == fred_json_full
    mapper_full.from_json(bob_json_full, bob)
    assert mapper_full.to_json(bob) == bob_json_full
| 29.973684
| 75
| 0.616769
| 931
| 6,834
| 4.288937
| 0.052632
| 0.080641
| 0.067618
| 0.066116
| 0.916354
| 0.916354
| 0.883797
| 0.863261
| 0.842474
| 0.801402
| 0
| 0.053444
| 0.241586
| 6,834
| 227
| 76
| 30.105727
| 0.716959
| 0.056629
| 0
| 0.858025
| 0
| 0
| 0.186741
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.024691
| false
| 0
| 0.012346
| 0
| 0.037037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
599b622747b56560dc91cef6be7556b3dcbbef00
| 154
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_0/_pkg0_0_0_0/_pkg0_0_0_0_0/__init__.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_0/_pkg0_0_0_0/_pkg0_0_0_0_0/__init__.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_0/_pkg0_0_0_0/_pkg0_0_0_0_0/__init__.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from ._mod0_0_0_0_0_0 import *
from ._mod0_0_0_0_0_1 import *
from ._mod0_0_0_0_0_2 import *
from ._mod0_0_0_0_0_3 import *
from ._mod0_0_0_0_0_4 import *
| 30.8
| 30
| 0.811688
| 40
| 154
| 2.375
| 0.2
| 0.336842
| 0.347368
| 0.252632
| 0.884211
| 0.884211
| 0.757895
| 0
| 0
| 0
| 0
| 0.222222
| 0.123377
| 154
| 5
| 31
| 30.8
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.